From 6709b096466855e48c02f69e636843dbbdb2c344 Mon Sep 17 00:00:00 2001 From: Vikrant Gupta Date: Tue, 9 Sep 2025 13:05:07 +0530 Subject: [PATCH 01/51] fix(user): populate correct error message on client (#9043) * fix(user): populate correct error message on client * fix(user): populate correct error message on client --- frontend/src/api/v1/login/loginPrecheck.ts | 2 +- frontend/src/api/v1/register/signup.ts | 20 ++++++-------- frontend/src/container/Login/index.tsx | 2 +- frontend/src/pages/SignUp/SignUp.tsx | 27 +++++++++---------- .../src/types/api/user/getInviteDetails.ts | 2 +- frontend/src/types/api/user/loginPrecheck.ts | 5 ++++ pkg/query-service/app/http_handler.go | 2 +- 7 files changed, 29 insertions(+), 31 deletions(-) diff --git a/frontend/src/api/v1/login/loginPrecheck.ts b/frontend/src/api/v1/login/loginPrecheck.ts index c0cdc3dcc43a..eac00182cb50 100644 --- a/frontend/src/api/v1/login/loginPrecheck.ts +++ b/frontend/src/api/v1/login/loginPrecheck.ts @@ -2,7 +2,7 @@ import axios from 'api'; import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; import { AxiosError } from 'axios'; import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps, Props } from 'types/api/user/loginPrecheck'; +import { Props, Signup as PayloadProps } from 'types/api/user/loginPrecheck'; const loginPrecheck = async ( props: Props, diff --git a/frontend/src/api/v1/register/signup.ts b/frontend/src/api/v1/register/signup.ts index fcb483dffbaf..5838a8e7adf0 100644 --- a/frontend/src/api/v1/register/signup.ts +++ b/frontend/src/api/v1/register/signup.ts @@ -1,25 +1,21 @@ import axios from 'api'; -import { ErrorResponseHandler } from 'api/ErrorResponseHandler'; +import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2'; import { AxiosError } from 'axios'; -import { ErrorResponse, SuccessResponse } from 'types/api'; -import { PayloadProps } from 'types/api/user/loginPrecheck'; +import { ErrorV2Resp, SuccessResponseV2 } from 'types/api'; 
+import { PayloadProps, Signup } from 'types/api/user/loginPrecheck'; import { Props } from 'types/api/user/signup'; -const signup = async ( - props: Props, -): Promise | ErrorResponse> => { +const signup = async (props: Props): Promise> => { try { - const response = await axios.post(`/register`, { + const response = await axios.post(`/register`, { ...props, }); return { - statusCode: 200, - error: null, - message: response.data.status, - payload: response.data?.data, + httpStatusCode: response.status, + data: response.data.data, }; } catch (error) { - return ErrorResponseHandler(error as AxiosError); + ErrorResponseHandlerV2(error as AxiosError); } }; diff --git a/frontend/src/container/Login/index.tsx b/frontend/src/container/Login/index.tsx index 3ee204619869..33e8f0edac79 100644 --- a/frontend/src/container/Login/index.tsx +++ b/frontend/src/container/Login/index.tsx @@ -16,7 +16,7 @@ import { useAppContext } from 'providers/App/App'; import { useEffect, useState } from 'react'; import { useQuery } from 'react-query'; import APIError from 'types/api/error'; -import { PayloadProps as PrecheckResultType } from 'types/api/user/loginPrecheck'; +import { Signup as PrecheckResultType } from 'types/api/user/loginPrecheck'; import { FormContainer, Label, ParentContainer } from './styles'; diff --git a/frontend/src/pages/SignUp/SignUp.tsx b/frontend/src/pages/SignUp/SignUp.tsx index 03d44bbfdc05..370955fd52af 100644 --- a/frontend/src/pages/SignUp/SignUp.tsx +++ b/frontend/src/pages/SignUp/SignUp.tsx @@ -10,13 +10,14 @@ import afterLogin from 'AppRoutes/utils'; import ROUTES from 'constants/routes'; import { useNotifications } from 'hooks/useNotifications'; import history from 'lib/history'; +import { useErrorModal } from 'providers/ErrorModalProvider'; import { useEffect, useState } from 'react'; import { useQuery } from 'react-query'; import { useLocation } from 'react-router-dom'; import { SuccessResponseV2 } from 'types/api'; import APIError from 'types/api/error'; 
import { InviteDetails } from 'types/api/user/getInviteDetails'; -import { PayloadProps as LoginPrecheckPayloadProps } from 'types/api/user/loginPrecheck'; +import { Signup as LoginPrecheckPayloadProps } from 'types/api/user/loginPrecheck'; import { FormContainer, Label } from './styles'; import { isPasswordNotValidMessage, isPasswordValid } from './utils'; @@ -109,31 +110,27 @@ function SignUp(): JSX.Element { ]); const isSignUp = token === null; + const { showErrorModal } = useErrorModal(); const signUp = async (values: FormValues): Promise => { try { const { organizationName, password, email } = values; - const response = await signUpApi({ + await signUpApi({ email, orgDisplayName: organizationName, password, token: params.get('token') || undefined, }); - if (response.statusCode === 200) { - const loginResponse = await loginApi({ - email, - password, - }); - - const { data } = loginResponse; - await afterLogin(data.userId, data.accessJwt, data.refreshJwt); - } - } catch (error) { - notifications.error({ - message: (error as APIError).getErrorCode(), - description: (error as APIError).getErrorMessage(), + const loginResponse = await loginApi({ + email, + password, }); + + const { data } = loginResponse; + await afterLogin(data.userId, data.accessJwt, data.refreshJwt); + } catch (error) { + showErrorModal(error as APIError); } }; diff --git a/frontend/src/types/api/user/getInviteDetails.ts b/frontend/src/types/api/user/getInviteDetails.ts index 88807a3fd87f..6bc3754061dc 100644 --- a/frontend/src/types/api/user/getInviteDetails.ts +++ b/frontend/src/types/api/user/getInviteDetails.ts @@ -2,7 +2,7 @@ import { User } from 'types/reducer/app'; import { ROLES } from 'types/roles'; import { Organization } from './getOrganization'; -import { PayloadProps as LoginPrecheckPayloadProps } from './loginPrecheck'; +import { Signup as LoginPrecheckPayloadProps } from './loginPrecheck'; export interface Props { inviteId: string; diff --git 
a/frontend/src/types/api/user/loginPrecheck.ts b/frontend/src/types/api/user/loginPrecheck.ts index fed34eacec2a..2082013f07b9 100644 --- a/frontend/src/types/api/user/loginPrecheck.ts +++ b/frontend/src/types/api/user/loginPrecheck.ts @@ -1,4 +1,9 @@ export interface PayloadProps { + data: Signup; + status: string; +} + +export interface Signup { sso: boolean; ssoUrl?: string; canSelfRegister?: boolean; diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 478a3aa62011..5e7442a1d49d 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -2061,7 +2061,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) { var req types.PostableRegisterOrgAndAdmin if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - RespondError(w, &model.ApiError{Err: err, Typ: model.ErrorBadData}, nil) + render.Error(w, err) return } From 717efaf167c2e90d574ccb67ba74e9b7be01f4ec Mon Sep 17 00:00:00 2001 From: Amlan Kumar Nandy <45410599+amlannandy@users.noreply.github.com> Date: Tue, 9 Sep 2025 14:56:29 +0700 Subject: [PATCH 02/51] feat: base setup for new create alerts page (#8957) --- .../CreateAlertHeader/CreateAlertHeader.tsx | 47 ++ .../CreateAlertHeader/LabelsInput.tsx | 153 ++++++ .../__tests__/CreateAlertHeader.test.tsx | 56 ++ .../__tests__/LabelsInput.test.tsx | 503 ++++++++++++++++++ .../CreateAlertV2/CreateAlertHeader/index.ts | 3 + .../CreateAlertHeader/styles.scss | 151 ++++++ .../CreateAlertV2/CreateAlertHeader/types.ts | 12 + .../CreateAlertV2/CreateAlertV2.styles.scss | 3 + .../container/CreateAlertV2/CreateAlertV2.tsx | 16 + .../container/CreateAlertV2/Stepper/index.tsx | 18 + .../CreateAlertV2/Stepper/styles.scss | 44 ++ .../CreateAlertV2/context/constants.ts | 7 + .../container/CreateAlertV2/context/index.tsx | 58 ++ .../container/CreateAlertV2/context/types.ts | 31 ++ .../container/CreateAlertV2/context/utils.tsx | 26 + 
frontend/src/container/CreateAlertV2/index.ts | 3 + .../src/container/CreateAlertV2/utils.tsx | 3 + frontend/src/pages/CreateAlert/index.tsx | 8 + 18 files changed, 1142 insertions(+) create mode 100644 frontend/src/container/CreateAlertV2/CreateAlertHeader/CreateAlertHeader.tsx create mode 100644 frontend/src/container/CreateAlertV2/CreateAlertHeader/LabelsInput.tsx create mode 100644 frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/CreateAlertHeader.test.tsx create mode 100644 frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/LabelsInput.test.tsx create mode 100644 frontend/src/container/CreateAlertV2/CreateAlertHeader/index.ts create mode 100644 frontend/src/container/CreateAlertV2/CreateAlertHeader/styles.scss create mode 100644 frontend/src/container/CreateAlertV2/CreateAlertHeader/types.ts create mode 100644 frontend/src/container/CreateAlertV2/CreateAlertV2.styles.scss create mode 100644 frontend/src/container/CreateAlertV2/CreateAlertV2.tsx create mode 100644 frontend/src/container/CreateAlertV2/Stepper/index.tsx create mode 100644 frontend/src/container/CreateAlertV2/Stepper/styles.scss create mode 100644 frontend/src/container/CreateAlertV2/context/constants.ts create mode 100644 frontend/src/container/CreateAlertV2/context/index.tsx create mode 100644 frontend/src/container/CreateAlertV2/context/types.ts create mode 100644 frontend/src/container/CreateAlertV2/context/utils.tsx create mode 100644 frontend/src/container/CreateAlertV2/index.ts create mode 100644 frontend/src/container/CreateAlertV2/utils.tsx diff --git a/frontend/src/container/CreateAlertV2/CreateAlertHeader/CreateAlertHeader.tsx b/frontend/src/container/CreateAlertV2/CreateAlertHeader/CreateAlertHeader.tsx new file mode 100644 index 000000000000..af6ad2612c7b --- /dev/null +++ b/frontend/src/container/CreateAlertV2/CreateAlertHeader/CreateAlertHeader.tsx @@ -0,0 +1,47 @@ +import './styles.scss'; + +import { Labels } from 'types/api/alerts/def'; + +import { 
useCreateAlertState } from '../context'; +import LabelsInput from './LabelsInput'; + +function CreateAlertHeader(): JSX.Element { + const { alertState, setAlertState } = useCreateAlertState(); + + return ( +
+
+
New Alert Rule
+
+ +
+ + setAlertState({ type: 'SET_ALERT_NAME', payload: e.target.value }) + } + className="alert-header__input title" + placeholder="Enter alert rule name" + /> + + setAlertState({ type: 'SET_ALERT_DESCRIPTION', payload: e.target.value }) + } + className="alert-header__input description" + placeholder="Click to add description..." + /> + + setAlertState({ type: 'SET_ALERT_LABELS', payload: labels }) + } + /> +
+
+ ); +} + +export default CreateAlertHeader; diff --git a/frontend/src/container/CreateAlertV2/CreateAlertHeader/LabelsInput.tsx b/frontend/src/container/CreateAlertV2/CreateAlertHeader/LabelsInput.tsx new file mode 100644 index 000000000000..5b4bbfdb5679 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/CreateAlertHeader/LabelsInput.tsx @@ -0,0 +1,153 @@ +import { CloseOutlined } from '@ant-design/icons'; +import { useNotifications } from 'hooks/useNotifications'; +import React, { useCallback, useState } from 'react'; + +import { LabelInputState, LabelsInputProps } from './types'; + +function LabelsInput({ + labels, + onLabelsChange, +}: LabelsInputProps): JSX.Element { + const { notifications } = useNotifications(); + const [inputState, setInputState] = useState({ + key: '', + value: '', + isKeyInput: true, + }); + const [isAdding, setIsAdding] = useState(false); + + const handleAddLabelsClick = useCallback(() => { + setIsAdding(true); + setInputState({ key: '', value: '', isKeyInput: true }); + }, []); + + const handleKeyDown = useCallback( + // eslint-disable-next-line sonarjs/cognitive-complexity + (e: React.KeyboardEvent) => { + if (e.key === 'Enter') { + if (inputState.isKeyInput) { + // Check if input contains a colon (key:value format) + if (inputState.key.includes(':')) { + const [key, ...valueParts] = inputState.key.split(':'); + const value = valueParts.join(':'); // Rejoin in case value contains colons + + if (key.trim() && value.trim()) { + if (labels[key.trim()]) { + notifications.error({ + message: 'Label with this key already exists', + }); + return; + } + // Add the label immediately + const newLabels = { + ...labels, + [key.trim()]: value.trim(), + }; + onLabelsChange(newLabels); + + // Reset input state + setInputState({ key: '', value: '', isKeyInput: true }); + } + } else if (inputState.key.trim()) { + if (labels[inputState.key.trim()]) { + notifications.error({ + message: 'Label with this key already exists', + }); + return; + } + 
setInputState((prev) => ({ ...prev, isKeyInput: false })); + } + } else if (inputState.value.trim()) { + // Add the label + const newLabels = { + ...labels, + [inputState.key.trim()]: inputState.value.trim(), + }; + onLabelsChange(newLabels); + + // Reset and continue adding + setInputState({ key: '', value: '', isKeyInput: true }); + } + } else if (e.key === 'Escape') { + // Cancel adding + setIsAdding(false); + setInputState({ key: '', value: '', isKeyInput: true }); + } + }, + [inputState, labels, notifications, onLabelsChange], + ); + + const handleInputChange = useCallback( + (e: React.ChangeEvent) => { + if (inputState.isKeyInput) { + setInputState((prev) => ({ ...prev, key: e.target.value })); + } else { + setInputState((prev) => ({ ...prev, value: e.target.value })); + } + }, + [inputState.isKeyInput], + ); + + const handleRemoveLabel = useCallback( + (key: string) => { + const newLabels = { ...labels }; + delete newLabels[key]; + onLabelsChange(newLabels); + }, + [labels, onLabelsChange], + ); + + const handleBlur = useCallback(() => { + if (!inputState.key && !inputState.value) { + setIsAdding(false); + setInputState({ key: '', value: '', isKeyInput: true }); + } + }, [inputState]); + + return ( +
+ {Object.keys(labels).length > 0 && ( +
+ {Object.entries(labels).map(([key, value]) => ( + + {key}: {value} + + + ))} +
+ )} + + {!isAdding ? ( + + ) : ( +
+ +
+ )} +
+ ); +} + +export default LabelsInput; diff --git a/frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/CreateAlertHeader.test.tsx b/frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/CreateAlertHeader.test.tsx new file mode 100644 index 000000000000..978c359c2932 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/CreateAlertHeader.test.tsx @@ -0,0 +1,56 @@ +/* eslint-disable react/jsx-props-no-spreading */ +import { fireEvent, render, screen } from '@testing-library/react'; + +import { CreateAlertProvider } from '../../context'; +import CreateAlertHeader from '../CreateAlertHeader'; + +const renderCreateAlertHeader = (): ReturnType => + render( + + + , + ); + +describe('CreateAlertHeader', () => { + it('renders the header with title', () => { + renderCreateAlertHeader(); + expect(screen.getByText('New Alert Rule')).toBeInTheDocument(); + }); + + it('renders name input with placeholder', () => { + renderCreateAlertHeader(); + const nameInput = screen.getByPlaceholderText('Enter alert rule name'); + expect(nameInput).toBeInTheDocument(); + }); + + it('renders description input with placeholder', () => { + renderCreateAlertHeader(); + const descriptionInput = screen.getByPlaceholderText( + 'Click to add description...', + ); + expect(descriptionInput).toBeInTheDocument(); + }); + + it('renders LabelsInput component', () => { + renderCreateAlertHeader(); + expect(screen.getByText('+ Add labels')).toBeInTheDocument(); + }); + + it('updates name when typing in name input', () => { + renderCreateAlertHeader(); + const nameInput = screen.getByPlaceholderText('Enter alert rule name'); + + fireEvent.change(nameInput, { target: { value: 'Test Alert' } }); + + expect(nameInput).toHaveValue('Test Alert'); + }); + + it('updates description when typing in description input', () => { + renderCreateAlertHeader(); + const descriptionInput = screen.getByPlaceholderText( + 'Click to add description...', + ); + 
fireEvent.change(descriptionInput, { target: { value: 'Test Description' } }); + expect(descriptionInput).toHaveValue('Test Description'); + }); +}); diff --git a/frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/LabelsInput.test.tsx b/frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/LabelsInput.test.tsx new file mode 100644 index 000000000000..ed040ae68439 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/LabelsInput.test.tsx @@ -0,0 +1,503 @@ +/* eslint-disable react/jsx-props-no-spreading */ +import { fireEvent, render, screen } from '@testing-library/react'; + +import LabelsInput from '../LabelsInput'; +import { LabelsInputProps } from '../types'; + +// Mock the CloseOutlined icon +jest.mock('@ant-design/icons', () => ({ + CloseOutlined: (): JSX.Element => ×, +})); + +const mockOnLabelsChange = jest.fn(); + +const defaultProps: LabelsInputProps = { + labels: {}, + onLabelsChange: mockOnLabelsChange, +}; + +const ADD_LABELS_TEXT = '+ Add labels'; +const ENTER_KEY_PLACEHOLDER = 'Enter key'; +const ENTER_VALUE_PLACEHOLDER = 'Enter value'; + +const CLOSE_ICON_TEST_ID = 'close-icon'; +const SEVERITY_HIGH_TEXT = 'severity: high'; +const ENVIRONMENT_PRODUCTION_TEXT = 'environment: production'; +const SEVERITY_HIGH_KEY_VALUE = 'severity:high'; + +const renderLabelsInput = ( + props: Partial = {}, +): ReturnType => + render(); + +describe('LabelsInput', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + describe('Initial Rendering', () => { + it('renders add button when no labels exist', () => { + renderLabelsInput(); + expect(screen.getByText(ADD_LABELS_TEXT)).toBeInTheDocument(); + expect(screen.queryByTestId(CLOSE_ICON_TEST_ID)).not.toBeInTheDocument(); + }); + + it('renders existing labels when provided', () => { + const labels = { severity: 'high', environment: 'production' }; + renderLabelsInput({ labels }); + + expect(screen.getByText(SEVERITY_HIGH_TEXT)).toBeInTheDocument(); + 
expect(screen.getByText(ENVIRONMENT_PRODUCTION_TEXT)).toBeInTheDocument(); + expect(screen.getAllByTestId(CLOSE_ICON_TEST_ID)).toHaveLength(2); + }); + + it('does not render existing labels section when no labels', () => { + renderLabelsInput(); + expect(screen.queryByText(SEVERITY_HIGH_TEXT)).not.toBeInTheDocument(); + }); + }); + + describe('Adding Labels', () => { + it('shows input field when add button is clicked', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + + expect( + screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER), + ).toBeInTheDocument(); + expect(screen.queryByText(ADD_LABELS_TEXT)).not.toBeInTheDocument(); + }); + + it('switches from key input to value input on Enter', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + fireEvent.change(input, { target: { value: 'severity' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + expect( + screen.getByPlaceholderText(ENTER_VALUE_PLACEHOLDER), + ).toBeInTheDocument(); + expect( + screen.queryByPlaceholderText(ENTER_KEY_PLACEHOLDER), + ).not.toBeInTheDocument(); + }); + + it('adds label when both key and value are provided', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter key + fireEvent.change(input, { target: { value: 'severity' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + // Enter value + const valueInput = screen.getByPlaceholderText(ENTER_VALUE_PLACEHOLDER); + fireEvent.change(valueInput, { target: { value: 'high' } }); + fireEvent.keyDown(valueInput, { key: 'Enter' }); + + expect(mockOnLabelsChange).toHaveBeenCalledWith({ severity: 'high' }); + }); + + it('does not switch to value input if key is empty', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = 
screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + fireEvent.keyDown(input, { key: 'Enter' }); + + expect( + screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER), + ).toBeInTheDocument(); + expect( + screen.queryByPlaceholderText(ENTER_VALUE_PLACEHOLDER), + ).not.toBeInTheDocument(); + }); + + it('does not add label if value is empty', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter key + fireEvent.change(input, { target: { value: 'severity' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + // Try to add with empty value + const valueInput = screen.getByPlaceholderText(ENTER_VALUE_PLACEHOLDER); + fireEvent.keyDown(valueInput, { key: 'Enter' }); + + expect(mockOnLabelsChange).not.toHaveBeenCalled(); + }); + + it('trims whitespace from key and value', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter key with whitespace + fireEvent.change(input, { target: { value: ' severity ' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + // Enter value with whitespace + const valueInput = screen.getByPlaceholderText(ENTER_VALUE_PLACEHOLDER); + fireEvent.change(valueInput, { target: { value: ' high ' } }); + fireEvent.keyDown(valueInput, { key: 'Enter' }); + + expect(mockOnLabelsChange).toHaveBeenCalledWith({ severity: 'high' }); + }); + + it('resets input state after adding label', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Add a label + fireEvent.change(input, { target: { value: 'severity' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + const valueInput = screen.getByPlaceholderText(ENTER_VALUE_PLACEHOLDER); + fireEvent.change(valueInput, { target: { value: 'high' } }); + fireEvent.keyDown(valueInput, { 
key: 'Enter' }); + + // Should be back to key input + expect( + screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER), + ).toBeInTheDocument(); + expect( + screen.queryByPlaceholderText(ENTER_VALUE_PLACEHOLDER), + ).not.toBeInTheDocument(); + }); + }); + + describe('Removing Labels', () => { + it('removes label when close button is clicked', () => { + const labels = { severity: 'high', environment: 'production' }; + renderLabelsInput({ labels }); + + const removeButtons = screen.getAllByTestId(CLOSE_ICON_TEST_ID); + fireEvent.click(removeButtons[0]); + + expect(mockOnLabelsChange).toHaveBeenCalledWith({ + environment: 'production', + }); + }); + + it('calls onLabelsChange with empty object when last label is removed', () => { + const labels = { severity: 'high' }; + renderLabelsInput({ labels }); + + const removeButton = screen.getByTestId('close-icon'); + fireEvent.click(removeButton); + + expect(mockOnLabelsChange).toHaveBeenCalledWith({}); + }); + }); + + describe('Keyboard Interactions', () => { + it('cancels adding label on Escape key', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + fireEvent.keyDown(input, { key: 'Escape' }); + + expect(screen.getByText(ADD_LABELS_TEXT)).toBeInTheDocument(); + expect( + screen.queryByPlaceholderText(ENTER_KEY_PLACEHOLDER), + ).not.toBeInTheDocument(); + }); + + it('cancels adding label on Escape key in value input', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter key + fireEvent.change(input, { target: { value: 'severity' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + // Cancel in value input + const valueInput = screen.getByPlaceholderText(ENTER_VALUE_PLACEHOLDER); + fireEvent.keyDown(valueInput, { key: 'Escape' }); + + expect(screen.getByText(ADD_LABELS_TEXT)).toBeInTheDocument(); + expect( 
+ screen.queryByPlaceholderText(ENTER_VALUE_PLACEHOLDER), + ).not.toBeInTheDocument(); + }); + }); + + describe('Blur Behavior', () => { + it('closes input immediately when both key and value are empty', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + fireEvent.blur(input); + + // The input should close immediately when both key and value are empty + expect(screen.getByText(ADD_LABELS_TEXT)).toBeInTheDocument(); + expect( + screen.queryByPlaceholderText(ENTER_KEY_PLACEHOLDER), + ).not.toBeInTheDocument(); + }); + + it('does not close input immediately when key has value', () => { + jest.useFakeTimers(); + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + fireEvent.change(input, { target: { value: 'severity' } }); + fireEvent.blur(input); + + jest.advanceTimersByTime(200); + + expect( + screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER), + ).toBeInTheDocument(); + expect(screen.queryByText(ADD_LABELS_TEXT)).not.toBeInTheDocument(); + + jest.useRealTimers(); + }); + }); + + describe('Input Change Handling', () => { + it('updates key input value correctly', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + fireEvent.change(input, { target: { value: 'severity' } }); + + expect(input).toHaveValue('severity'); + }); + + it('updates value input correctly', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter key + fireEvent.change(input, { target: { value: 'severity' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + // Update value + const valueInput = screen.getByPlaceholderText(ENTER_VALUE_PLACEHOLDER); + fireEvent.change(valueInput, { 
target: { value: 'high' } }); + + expect(valueInput).toHaveValue('high'); + }); + }); + + describe('Edge Cases', () => { + it('handles multiple labels correctly', () => { + const labels = { + severity: 'high', + environment: 'production', + service: 'api-gateway', + }; + renderLabelsInput({ labels }); + + expect(screen.getByText(SEVERITY_HIGH_TEXT)).toBeInTheDocument(); + expect(screen.getByText(ENVIRONMENT_PRODUCTION_TEXT)).toBeInTheDocument(); + expect(screen.getByText('service: api-gateway')).toBeInTheDocument(); + expect(screen.getAllByTestId(CLOSE_ICON_TEST_ID)).toHaveLength(3); + }); + + it('handles empty string values', () => { + const labels = { severity: '' }; + renderLabelsInput({ labels }); + + expect(screen.getByText(/severity/)).toBeInTheDocument(); + }); + + it('handles special characters in labels', () => { + const labels = { 'service-name': 'api-gateway-v1' }; + renderLabelsInput({ labels }); + + expect(screen.getByText('service-name: api-gateway-v1')).toBeInTheDocument(); + }); + + it('maintains focus on input after adding label', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Add a label + fireEvent.change(input, { target: { value: 'severity' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + const valueInput = screen.getByPlaceholderText(ENTER_VALUE_PLACEHOLDER); + fireEvent.change(valueInput, { target: { value: 'high' } }); + fireEvent.keyDown(valueInput, { key: 'Enter' }); + + // Should be focused on new key input + const newInput = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + expect(newInput).toHaveFocus(); + }); + }); + + describe('Key:Value Format Support', () => { + it('adds label when key:value format is entered and Enter is pressed', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter key:value 
format + fireEvent.change(input, { target: { value: SEVERITY_HIGH_KEY_VALUE } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + expect(mockOnLabelsChange).toHaveBeenCalledWith({ severity: 'high' }); + }); + + it('trims whitespace from key and value in key:value format', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter key:value format with whitespace + fireEvent.change(input, { target: { value: ' severity : high ' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + expect(mockOnLabelsChange).toHaveBeenCalledWith({ severity: 'high' }); + }); + + it('handles values with colons correctly', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter key:value format where value contains colons + fireEvent.change(input, { + target: { value: 'url:https://example.com:8080' }, + }); + fireEvent.keyDown(input, { key: 'Enter' }); + + expect(mockOnLabelsChange).toHaveBeenCalledWith({ + url: 'https://example.com:8080', + }); + }); + + it('does not add label if key is empty in key:value format', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter key:value format with empty key + fireEvent.change(input, { target: { value: ':high' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + expect(mockOnLabelsChange).not.toHaveBeenCalled(); + }); + + it('does not add label if value is empty in key:value format', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter key:value format with empty value + fireEvent.change(input, { target: { value: 'severity:' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + 
expect(mockOnLabelsChange).not.toHaveBeenCalled(); + }); + + it('does not add label if only colon is entered', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Enter only colon + fireEvent.change(input, { target: { value: ':' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + expect(mockOnLabelsChange).not.toHaveBeenCalled(); + }); + + it('resets input state after adding label with key:value format', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Add label with key:value format + fireEvent.change(input, { target: { value: 'severity:high' } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + // Should be back to key input for next label + expect( + screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER), + ).toBeInTheDocument(); + expect( + screen.queryByPlaceholderText(ENTER_VALUE_PLACEHOLDER), + ).not.toBeInTheDocument(); + }); + + it('does not auto-save when typing key:value without pressing Enter', () => { + renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Type key:value format but don't press Enter + fireEvent.change(input, { target: { value: SEVERITY_HIGH_KEY_VALUE } }); + + // Should not have called onLabelsChange yet + expect(mockOnLabelsChange).not.toHaveBeenCalled(); + }); + + it('handles multiple key:value entries correctly', () => { + const { rerender } = renderLabelsInput(); + + fireEvent.click(screen.getByText(ADD_LABELS_TEXT)); + const input = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + + // Add first label + fireEvent.change(input, { target: { value: SEVERITY_HIGH_KEY_VALUE } }); + fireEvent.keyDown(input, { key: 'Enter' }); + + // Simulate parent component updating labels + const firstLabels = { severity: 'high' 
}; + rerender( + , + ); + + // Add second label + const newInput = screen.getByPlaceholderText(ENTER_KEY_PLACEHOLDER); + fireEvent.change(newInput, { target: { value: 'environment:production' } }); + fireEvent.keyDown(newInput, { key: 'Enter' }); + + // Check that we made two calls and the last one includes both labels + expect(mockOnLabelsChange).toHaveBeenCalledTimes(2); + expect(mockOnLabelsChange).toHaveBeenNthCalledWith(1, { severity: 'high' }); + expect(mockOnLabelsChange).toHaveBeenNthCalledWith(2, { + severity: 'high', + environment: 'production', + }); + }); + }); +}); diff --git a/frontend/src/container/CreateAlertV2/CreateAlertHeader/index.ts b/frontend/src/container/CreateAlertV2/CreateAlertHeader/index.ts new file mode 100644 index 000000000000..658ab98b3cb1 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/CreateAlertHeader/index.ts @@ -0,0 +1,3 @@ +import CreateAlertHeader from './CreateAlertHeader'; + +export default CreateAlertHeader; diff --git a/frontend/src/container/CreateAlertV2/CreateAlertHeader/styles.scss b/frontend/src/container/CreateAlertV2/CreateAlertHeader/styles.scss new file mode 100644 index 000000000000..c594cbebc226 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/CreateAlertHeader/styles.scss @@ -0,0 +1,151 @@ +.alert-header { + background-color: var(--bg-ink-500); + font-family: inherit; + color: var(--text-vanilla-100); + + /* Top bar with diagonal stripes */ + &__tab-bar { + height: 32px; + display: flex; + align-items: center; + background: repeating-linear-gradient( + -45deg, + #0f0f0f, + #0f0f0f 10px, + #101010 10px, + #101010 20px + ); + padding-left: 0; + } + + /* Tab block visuals */ + &__tab { + display: flex; + align-items: center; + background-color: var(--bg-ink-500); + padding: 0 12px; + height: 32px; + font-size: 13px; + color: var(--text-vanilla-100); + margin-left: 12px; + margin-top: 12px; + } + + &__tab::before { + content: '•'; + margin-right: 6px; + font-size: 14px; + color: 
var(--bg-slate-100); + } + + &__content { + padding: 16px; + background: var(--bg-ink-500); + display: flex; + flex-direction: column; + gap: 8px; + } + + &__input.title { + font-size: 18px; + font-weight: 500; + background-color: transparent; + color: var(--text-vanilla-100); + } + + &__input:focus, + &__input:active { + border: none; + outline: none; + } + + &__input.description { + font-size: 14px; + background-color: transparent; + color: var(--text-vanilla-300); + } +} + +.labels-input { + display: flex; + flex-direction: column; + gap: 8px; + + &__add-button { + width: fit-content; + font-size: 13px; + color: #ccc; + border: 1px solid #333; + background-color: transparent; + cursor: pointer; + padding: 4px 8px; + border-radius: 4px; + + &:hover { + border-color: #555; + color: #fff; + } + } + + &__existing-labels { + display: flex; + flex-wrap: wrap; + gap: 8px; + } + + &__label-pill { + display: inline-flex; + align-items: center; + gap: 6px; + background-color: #ad7f581a; + color: var(--bg-sienna-400); + padding: 4px 8px; + border-radius: 16px; + font-size: 12px; + border: 1px solid var(--bg-sienna-500); + font-family: 'Geist Mono'; + } + + &__remove-button { + background: none; + border: none; + color: var(--bg-sienna-400); + cursor: pointer; + padding: 0; + display: flex; + align-items: center; + justify-content: center; + font-size: 10px; + + &:hover { + color: var(--text-vanilla-100); + } + } + + &__input-container { + display: flex; + align-items: center; + background-color: transparent; + border: none; + } + + &__input { + flex: 1; + background-color: transparent; + border: none; + outline: none; + padding: 6px 8px; + color: #fff; + font-size: 13px; + + &::placeholder { + color: #888; + } + + &:focus, + &:active { + border: none; + outline: none; + } + } +} diff --git a/frontend/src/container/CreateAlertV2/CreateAlertHeader/types.ts b/frontend/src/container/CreateAlertV2/CreateAlertHeader/types.ts new file mode 100644 index 000000000000..da7b6f9f89d4 
--- /dev/null +++ b/frontend/src/container/CreateAlertV2/CreateAlertHeader/types.ts @@ -0,0 +1,12 @@ +import { Labels } from 'types/api/alerts/def'; + +export interface LabelsInputProps { + labels: Labels; + onLabelsChange: (labels: Labels) => void; +} + +export interface LabelInputState { + key: string; + value: string; + isKeyInput: boolean; +} diff --git a/frontend/src/container/CreateAlertV2/CreateAlertV2.styles.scss b/frontend/src/container/CreateAlertV2/CreateAlertV2.styles.scss new file mode 100644 index 000000000000..916a70f958d0 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/CreateAlertV2.styles.scss @@ -0,0 +1,3 @@ +.create-alert-v2-container { + background-color: var(--bg-ink-500); +} diff --git a/frontend/src/container/CreateAlertV2/CreateAlertV2.tsx b/frontend/src/container/CreateAlertV2/CreateAlertV2.tsx new file mode 100644 index 000000000000..b896f8d10de6 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/CreateAlertV2.tsx @@ -0,0 +1,16 @@ +import './CreateAlertV2.styles.scss'; + +import { CreateAlertProvider } from './context'; +import CreateAlertHeader from './CreateAlertHeader/CreateAlertHeader'; + +function CreateAlertV2(): JSX.Element { + return ( +
+ + + +
+ ); +} + +export default CreateAlertV2; diff --git a/frontend/src/container/CreateAlertV2/Stepper/index.tsx b/frontend/src/container/CreateAlertV2/Stepper/index.tsx new file mode 100644 index 000000000000..8988389a6109 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/Stepper/index.tsx @@ -0,0 +1,18 @@ +import './styles.scss'; + +interface StepperProps { + stepNumber: number; + label: string; +} + +function Stepper({ stepNumber, label }: StepperProps): JSX.Element { + return ( +
+
{stepNumber}
+
{label}
+
+
+ ); +} + +export default Stepper; diff --git a/frontend/src/container/CreateAlertV2/Stepper/styles.scss b/frontend/src/container/CreateAlertV2/Stepper/styles.scss new file mode 100644 index 000000000000..db56be0695a1 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/Stepper/styles.scss @@ -0,0 +1,44 @@ +.stepper-container { + display: flex; + align-items: center; + gap: 16px; + margin-bottom: 16px; + padding: 16px; + border-radius: 8px; +} + +.step-number { + width: 24px; + height: 24px; + border-radius: 50%; + background-color: var(--bg-robin-400); + color: var(--text-slate-400); + display: flex; + align-items: center; + justify-content: center; + font-weight: 600; + font-size: 14px; + flex-shrink: 0; + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; +} + +.step-label { + font-size: 12px; + line-height: 20px; + font-weight: 500; + color: #e5e7eb; + text-transform: uppercase; + letter-spacing: 0.1em; + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; + flex-shrink: 0; +} + +.dotted-line { + flex: 1; + height: 8px; + background-image: radial-gradient(circle, #4a4a4a 1px, transparent 1px); + background-size: 8px 8px; + background-repeat: repeat-x; + background-position: center; + margin-left: 8px; +} diff --git a/frontend/src/container/CreateAlertV2/context/constants.ts b/frontend/src/container/CreateAlertV2/context/constants.ts new file mode 100644 index 000000000000..989251873cc2 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/context/constants.ts @@ -0,0 +1,7 @@ +import { AlertState } from './types'; + +export const INITIAL_ALERT_STATE: AlertState = { + name: '', + description: '', + labels: {}, +}; diff --git a/frontend/src/container/CreateAlertV2/context/index.tsx b/frontend/src/container/CreateAlertV2/context/index.tsx new file mode 100644 index 000000000000..53839bbc0727 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/context/index.tsx @@ -0,0 +1,58 @@ +import { + 
createContext, + useContext, + useMemo, + useReducer, + useState, +} from 'react'; + +import { INITIAL_ALERT_STATE } from './constants'; +import { + AlertCreationStep, + ICreateAlertContextProps, + ICreateAlertProviderProps, +} from './types'; +import { alertCreationReducer } from './utils'; + +const CreateAlertContext = createContext(null); + +// Hook exposing context state for CreateAlert +export const useCreateAlertState = (): ICreateAlertContextProps => { + const context = useContext(CreateAlertContext); + if (!context) { + throw new Error( + 'useCreateAlertState must be used within CreateAlertProvider', + ); + } + return context; +}; + +export function CreateAlertProvider( + props: ICreateAlertProviderProps, +): JSX.Element { + const { children } = props; + + const [alertState, setAlertState] = useReducer( + alertCreationReducer, + INITIAL_ALERT_STATE, + ); + const [step, setStep] = useState( + AlertCreationStep.ALERT_DEFINITION, + ); + + const contextValue: ICreateAlertContextProps = useMemo( + () => ({ + alertState, + setAlertState, + step, + setStep, + }), + [alertState, setAlertState, step, setStep], + ); + + return ( + + {children} + + ); +} diff --git a/frontend/src/container/CreateAlertV2/context/types.ts b/frontend/src/container/CreateAlertV2/context/types.ts new file mode 100644 index 000000000000..be0f8e53ca2d --- /dev/null +++ b/frontend/src/container/CreateAlertV2/context/types.ts @@ -0,0 +1,31 @@ +import { Dispatch } from 'react'; +import { Labels } from 'types/api/alerts/def'; + +export interface ICreateAlertContextProps { + alertState: AlertState; + setAlertState: Dispatch; + step: AlertCreationStep; + setStep: Dispatch; +} + +export interface ICreateAlertProviderProps { + children: React.ReactNode; +} + +export enum AlertCreationStep { + ALERT_DEFINITION = 0, + ALERT_CONDITION = 1, + EVALUATION_SETTINGS = 2, + NOTIFICATION_SETTINGS = 3, +} + +export interface AlertState { + name: string; + description: string; + labels: Labels; +} + +export 
type CreateAlertAction = + | { type: 'SET_ALERT_NAME'; payload: string } + | { type: 'SET_ALERT_DESCRIPTION'; payload: string } + | { type: 'SET_ALERT_LABELS'; payload: Labels }; diff --git a/frontend/src/container/CreateAlertV2/context/utils.tsx b/frontend/src/container/CreateAlertV2/context/utils.tsx new file mode 100644 index 000000000000..d04a8d1b49d2 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/context/utils.tsx @@ -0,0 +1,26 @@ +import { AlertState, CreateAlertAction } from './types'; + +export const alertCreationReducer = ( + state: AlertState, + action: CreateAlertAction, +): AlertState => { + switch (action.type) { + case 'SET_ALERT_NAME': + return { + ...state, + name: action.payload, + }; + case 'SET_ALERT_DESCRIPTION': + return { + ...state, + description: action.payload, + }; + case 'SET_ALERT_LABELS': + return { + ...state, + labels: action.payload, + }; + default: + return state; + } +}; diff --git a/frontend/src/container/CreateAlertV2/index.ts b/frontend/src/container/CreateAlertV2/index.ts new file mode 100644 index 000000000000..a0ce7e7814e0 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/index.ts @@ -0,0 +1,3 @@ +import CreateAlertV2 from './CreateAlertV2'; + +export default CreateAlertV2; diff --git a/frontend/src/container/CreateAlertV2/utils.tsx b/frontend/src/container/CreateAlertV2/utils.tsx new file mode 100644 index 000000000000..9451a2fc55f7 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/utils.tsx @@ -0,0 +1,3 @@ +// UI side feature flag +export const showNewCreateAlertsPage = (): boolean => + localStorage.getItem('showNewCreateAlertsPage') === 'true'; diff --git a/frontend/src/pages/CreateAlert/index.tsx b/frontend/src/pages/CreateAlert/index.tsx index 1b5efbc4969c..9172adfefbff 100644 --- a/frontend/src/pages/CreateAlert/index.tsx +++ b/frontend/src/pages/CreateAlert/index.tsx @@ -1,6 +1,14 @@ import CreateAlertRule from 'container/CreateAlertRule'; +import CreateAlertV2 from 'container/CreateAlertV2'; 
+import { showNewCreateAlertsPage } from 'container/CreateAlertV2/utils'; function CreateAlertPage(): JSX.Element { + const showNewCreateAlertsPageFlag = showNewCreateAlertsPage(); + + if (showNewCreateAlertsPageFlag) { + return ; + } + return ; } From 57013e1c4f4288bacecfaa679bf465116c0adb0e Mon Sep 17 00:00:00 2001 From: "primus-bot[bot]" <171087277+primus-bot[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 08:11:33 +0000 Subject: [PATCH 03/51] chore(release): bump to v0.94.1 (#9045) Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com> Co-authored-by: Vikrant Gupta --- deploy/docker-swarm/docker-compose.ha.yaml | 2 +- deploy/docker-swarm/docker-compose.yaml | 2 +- deploy/docker/docker-compose.ha.yaml | 2 +- deploy/docker/docker-compose.yaml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/deploy/docker-swarm/docker-compose.ha.yaml b/deploy/docker-swarm/docker-compose.ha.yaml index e5396e63d24f..a83f7f069273 100644 --- a/deploy/docker-swarm/docker-compose.ha.yaml +++ b/deploy/docker-swarm/docker-compose.ha.yaml @@ -176,7 +176,7 @@ services: # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml signoz: !!merge <<: *db-depend - image: signoz/signoz:v0.94.0 + image: signoz/signoz:v0.94.1 command: - --config=/root/config/prometheus.yml ports: diff --git a/deploy/docker-swarm/docker-compose.yaml b/deploy/docker-swarm/docker-compose.yaml index 74fe91c55cb1..af44d8e36f60 100644 --- a/deploy/docker-swarm/docker-compose.yaml +++ b/deploy/docker-swarm/docker-compose.yaml @@ -117,7 +117,7 @@ services: # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml signoz: !!merge <<: *db-depend - image: signoz/signoz:v0.94.0 + image: signoz/signoz:v0.94.1 command: - --config=/root/config/prometheus.yml ports: diff --git a/deploy/docker/docker-compose.ha.yaml b/deploy/docker/docker-compose.ha.yaml index 9c6298c937f2..d95ca53ed384 100644 --- 
a/deploy/docker/docker-compose.ha.yaml +++ b/deploy/docker/docker-compose.ha.yaml @@ -179,7 +179,7 @@ services: # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml signoz: !!merge <<: *db-depend - image: signoz/signoz:${VERSION:-v0.94.0} + image: signoz/signoz:${VERSION:-v0.94.1} container_name: signoz command: - --config=/root/config/prometheus.yml diff --git a/deploy/docker/docker-compose.yaml b/deploy/docker/docker-compose.yaml index e3a856f1a924..0b7b87d1d813 100644 --- a/deploy/docker/docker-compose.yaml +++ b/deploy/docker/docker-compose.yaml @@ -111,7 +111,7 @@ services: # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml signoz: !!merge <<: *db-depend - image: signoz/signoz:${VERSION:-v0.94.0} + image: signoz/signoz:${VERSION:-v0.94.1} container_name: signoz command: - --config=/root/config/prometheus.yml From c83eaf3d50594835aee8a29ea4e43419c296b269 Mon Sep 17 00:00:00 2001 From: Vibhu Pandey Date: Tue, 9 Sep 2025 15:44:27 +0530 Subject: [PATCH 04/51] chore: enable forbidigo and noerrors in depguard (#9047) * chore: enable forbidgo * chore: enable forbidgo --- .golangci.yml | 5 ++ cmd/community/server.go | 2 +- cmd/config.go | 12 ++-- cmd/enterprise/server.go | 2 +- pkg/config/resolver.go | 18 +++-- pkg/errors/code.go | 1 + pkg/errors/errors.go | 40 ++++++----- pkg/errors/errors_test.go | 2 +- pkg/factory/registry.go | 12 ++-- pkg/gateway/config.go | 10 ++- pkg/instrumentation/metric.go | 16 +++-- pkg/signoz/config.go | 67 ++++++++++--------- pkg/signoz/config_test.go | 5 +- pkg/smtp/client/auth.go | 5 +- pkg/smtp/client/smtp.go | 19 +++--- pkg/sqlmigration/013_update_organization.go | 3 +- pkg/sqlmigration/014_add_alertmanager.go | 2 +- ...pdate_dashboard_alert_and_saved_view_v5.go | 1 - pkg/sqlmigration/sqlmigration.go | 5 -- pkg/sqlmigrator/config.go | 9 ++- pkg/sqlmigrator/migrator.go | 5 +- pkg/sqlschema/sqlitesqlschema/ddl.go | 16 +++-- .../telemetrystorehook/logging.go | 3 +- 
pkg/types/authtypes/uuid.go | 5 +- pkg/types/pipelinetypes/time_parser.go | 5 +- pkg/types/ruletypes/recurrence.go | 5 +- pkg/types/ruletypes/templates.go | 5 +- pkg/types/ssotypes/google.go | 14 ++-- pkg/valuer/string.go | 7 +- pkg/valuer/uuid.go | 12 ++-- pkg/valuer/valuer.go | 6 ++ pkg/version/version.go | 8 +-- 32 files changed, 186 insertions(+), 141 deletions(-) diff --git a/.golangci.yml b/.golangci.yml index c2d325f5e84c..00643925fdef 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -8,6 +8,7 @@ linters: - depguard - iface - unparam + - forbidigo linters-settings: sloglint: @@ -24,6 +25,10 @@ linters-settings: deny: - pkg: "go.uber.org/zap" desc: "Do not use zap logger. Use slog instead." + noerrors: + deny: + - pkg: "errors" + desc: "Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead." iface: enable: - identical diff --git a/cmd/community/server.go b/cmd/community/server.go index 9def8a147ace..a437b450c172 100644 --- a/cmd/community/server.go +++ b/cmd/community/server.go @@ -32,7 +32,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) { Short: "Run the SigNoz server", FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true}, RunE: func(currCmd *cobra.Command, args []string) error { - config, err := cmd.NewSigNozConfig(currCmd.Context(), flags) + config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags) if err != nil { return err } diff --git a/cmd/config.go b/cmd/config.go index 4e627c4f91d1..206d9b44d4c0 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -2,7 +2,6 @@ package cmd import ( "context" - "fmt" "log/slog" "os" @@ -12,9 +11,10 @@ import ( "github.com/SigNoz/signoz/pkg/signoz" ) -func NewSigNozConfig(ctx context.Context, flags signoz.DeprecatedFlags) (signoz.Config, error) { +func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.DeprecatedFlags) (signoz.Config, error) { config, err := signoz.NewConfig( ctx, + logger, config.ResolverConfig{ Uris: 
[]string{"env:"}, ProviderFactories: []config.ProviderFactory{ @@ -31,14 +31,10 @@ func NewSigNozConfig(ctx context.Context, flags signoz.DeprecatedFlags) (signoz. return config, nil } -func NewJWTSecret(_ context.Context, _ *slog.Logger) string { +func NewJWTSecret(ctx context.Context, logger *slog.Logger) string { jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET") if len(jwtSecret) == 0 { - fmt.Println("🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!") - fmt.Println("SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application.") - fmt.Println("Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access.") - fmt.Println("Please set the SIGNOZ_JWT_SECRET environment variable immediately.") - fmt.Println("For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.") + logger.ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_JWT_SECRET environment variable immediately. 
For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.") } return jwtSecret diff --git a/cmd/enterprise/server.go b/cmd/enterprise/server.go index 56344ea8b2eb..b513e9a744b2 100644 --- a/cmd/enterprise/server.go +++ b/cmd/enterprise/server.go @@ -35,7 +35,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) { Short: "Run the SigNoz server", FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true}, RunE: func(currCmd *cobra.Command, args []string) error { - config, err := cmd.NewSigNozConfig(currCmd.Context(), flags) + config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags) if err != nil { return err } diff --git a/pkg/config/resolver.go b/pkg/config/resolver.go index 98ef88f75399..0f13baf4d82b 100644 --- a/pkg/config/resolver.go +++ b/pkg/config/resolver.go @@ -2,8 +2,12 @@ package config import ( "context" - "errors" - "fmt" + + "github.com/SigNoz/signoz/pkg/errors" +) + +var ( + ErrCodeInvalidResolver = errors.MustNewCode("invalid_resolver") ) type ResolverConfig struct { @@ -24,11 +28,11 @@ type Resolver struct { func NewResolver(config ResolverConfig) (*Resolver, error) { if len(config.Uris) == 0 { - return nil, errors.New("cannot build resolver, no uris have been provided") + return nil, errors.New(errors.TypeInvalidInput, ErrCodeInvalidResolver, "cannot build resolver, no uris have been provided") } if len(config.ProviderFactories) == 0 { - return nil, errors.New("cannot build resolver, no providers have been provided") + return nil, errors.New(errors.TypeInvalidInput, ErrCodeInvalidResolver, "cannot build resolver, no providers have been provided") } uris := make([]Uri, len(config.Uris)) @@ -48,7 +52,7 @@ func NewResolver(config ResolverConfig) (*Resolver, error) { scheme := provider.Scheme() // Check that the scheme is unique. 
if _, ok := providers[scheme]; ok { - return nil, fmt.Errorf("cannot build resolver, duplicate scheme %q found", scheme) + return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidResolver, "cannot build resolver, duplicate scheme %q found", scheme) } providers[provider.Scheme()] = provider @@ -70,7 +74,7 @@ func (resolver *Resolver) Do(ctx context.Context) (*Conf, error) { } if err = conf.Merge(currentConf); err != nil { - return nil, fmt.Errorf("cannot merge config: %w", err) + return nil, errors.Newf(errors.TypeInternal, ErrCodeInvalidResolver, "cannot merge config: %s", err.Error()) } } @@ -80,7 +84,7 @@ func (resolver *Resolver) Do(ctx context.Context) (*Conf, error) { func (resolver *Resolver) get(ctx context.Context, uri Uri) (*Conf, error) { provider, ok := resolver.providers[uri.scheme] if !ok { - return nil, fmt.Errorf("cannot find provider with schema %q", uri.scheme) + return nil, errors.Newf(errors.TypeInternal, ErrCodeInvalidResolver, "cannot find provider with schema %q", uri.scheme) } return provider.Get(ctx, uri) diff --git a/pkg/errors/code.go b/pkg/errors/code.go index db1e167f0ef9..aeecbd000bea 100644 --- a/pkg/errors/code.go +++ b/pkg/errors/code.go @@ -16,6 +16,7 @@ var ( CodeForbidden = Code{"forbidden"} CodeCanceled = Code{"canceled"} CodeTimeout = Code{"timeout"} + CodeUnknown = Code{"unknown"} ) var ( diff --git a/pkg/errors/errors.go b/pkg/errors/errors.go index 6c1005733851..64d42666748e 100644 --- a/pkg/errors/errors.go +++ b/pkg/errors/errors.go @@ -1,15 +1,11 @@ package errors import ( - "errors" + "errors" //nolint:depguard "fmt" "log/slog" ) -var ( - codeUnknown Code = MustNewCode("unknown") -) - // base is the fundamental struct that implements the error interface. // The order of the struct is 'TCMEUA'. type base struct { @@ -59,7 +55,7 @@ func New(t typ, code Code, message string) *base { } // Newf returns a new base by formatting the error message with the supplied format specifier. 
-func Newf(t typ, code Code, format string, args ...interface{}) *base { +func Newf(t typ, code Code, format string, args ...any) *base { return &base{ t: t, c: code, @@ -70,7 +66,7 @@ func Newf(t typ, code Code, format string, args ...interface{}) *base { // Wrapf returns a new error by formatting the error message with the supplied format specifier // and wrapping another error with base. -func Wrapf(cause error, t typ, code Code, format string, args ...interface{}) *base { +func Wrapf(cause error, t typ, code Code, format string, args ...any) *base { return &base{ t: t, c: code, @@ -91,7 +87,7 @@ func Wrap(cause error, t typ, code Code, message string) *base { // WithAdditional wraps an existing base error with a new formatted message. // It is used when the original error already contains type and code. -func WithAdditional(cause error, format string, args ...interface{}) *base { +func WithAdditional(cause error, format string, args ...any) *base { t, c, m, e, u, a := Unwrapb(cause) b := &base{ t: t, @@ -142,7 +138,7 @@ func Unwrapb(cause error) (typ, Code, string, error, string, []string) { return base.t, base.c, base.m, base.e, base.u, base.a } - return TypeInternal, codeUnknown, cause.Error(), cause, "", []string{} + return TypeInternal, CodeUnknown, cause.Error(), cause, "", []string{} } // Ast checks if the provided error matches the specified custom error type. @@ -164,42 +160,52 @@ func Join(errs ...error) error { return errors.Join(errs...) } +// As is a wrapper around errors.As. func As(err error, target any) bool { return errors.As(err, target) } +// Is is a wrapper around errors.Is. func Is(err error, target error) bool { return errors.Is(err, target) } -func WrapNotFoundf(cause error, code Code, format string, args ...interface{}) *base { +// WrapNotFoundf is a wrapper around Wrapf with TypeNotFound. +func WrapNotFoundf(cause error, code Code, format string, args ...any) *base { return Wrapf(cause, TypeNotFound, code, format, args...) 
} -func NewNotFoundf(code Code, format string, args ...interface{}) *base { +// NewNotFoundf is a wrapper around Newf with TypeNotFound. +func NewNotFoundf(code Code, format string, args ...any) *base { return Newf(TypeNotFound, code, format, args...) } -func WrapInternalf(cause error, code Code, format string, args ...interface{}) *base { +// WrapInternalf is a wrapper around Wrapf with TypeInternal. +func WrapInternalf(cause error, code Code, format string, args ...any) *base { return Wrapf(cause, TypeInternal, code, format, args...) } -func NewInternalf(code Code, format string, args ...interface{}) *base { +// NewInternalf is a wrapper around Newf with TypeInternal. +func NewInternalf(code Code, format string, args ...any) *base { return Newf(TypeInternal, code, format, args...) } -func WrapInvalidInputf(cause error, code Code, format string, args ...interface{}) *base { +// WrapInvalidInputf is a wrapper around Wrapf with TypeInvalidInput. +func WrapInvalidInputf(cause error, code Code, format string, args ...any) *base { return Wrapf(cause, TypeInvalidInput, code, format, args...) } -func NewInvalidInputf(code Code, format string, args ...interface{}) *base { +// NewInvalidInputf is a wrapper around Newf with TypeInvalidInput. +func NewInvalidInputf(code Code, format string, args ...any) *base { return Newf(TypeInvalidInput, code, format, args...) } -func WrapUnexpectedf(cause error, code Code, format string, args ...interface{}) *base { +// WrapUnexpectedf is a wrapper around Wrapf with TypeUnexpected. +func WrapUnexpectedf(cause error, code Code, format string, args ...any) *base { return Wrapf(cause, TypeInvalidInput, code, format, args...) } -func NewUnexpectedf(code Code, format string, args ...interface{}) *base { +// NewUnexpectedf is a wrapper around Newf with TypeUnexpected. +func NewUnexpectedf(code Code, format string, args ...any) *base { return Newf(TypeInvalidInput, code, format, args...) 
} diff --git a/pkg/errors/errors_test.go b/pkg/errors/errors_test.go index e7c6932ec2cb..831f378e7bcc 100644 --- a/pkg/errors/errors_test.go +++ b/pkg/errors/errors_test.go @@ -1,7 +1,7 @@ package errors import ( - "errors" + "errors" //nolint:depguard "testing" "github.com/stretchr/testify/assert" diff --git a/pkg/factory/registry.go b/pkg/factory/registry.go index be7d95c6d4d7..80fdba13c4ee 100644 --- a/pkg/factory/registry.go +++ b/pkg/factory/registry.go @@ -2,12 +2,16 @@ package factory import ( "context" - "errors" - "fmt" "log/slog" "os" "os/signal" "syscall" + + "github.com/SigNoz/signoz/pkg/errors" +) + +var ( + ErrCodeInvalidRegistry = errors.MustNewCode("invalid_registry") ) type Registry struct { @@ -20,11 +24,11 @@ type Registry struct { // New creates a new registry of services. It needs at least one service in the input. func NewRegistry(logger *slog.Logger, services ...NamedService) (*Registry, error) { if logger == nil { - return nil, fmt.Errorf("cannot build registry, logger is required") + return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidRegistry, "cannot build registry, logger is required") } if len(services) == 0 { - return nil, fmt.Errorf("cannot build registry, at least one service is required") + return nil, errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidRegistry, "cannot build registry, at least one service is required") } m, err := NewNamedMap(services...) 
diff --git a/pkg/gateway/config.go b/pkg/gateway/config.go index 2771f6c1e639..e9a79b40c3af 100644 --- a/pkg/gateway/config.go +++ b/pkg/gateway/config.go @@ -1,12 +1,17 @@ package gateway import ( - "errors" "net/url" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" ) +var ( + ErrCodeInvalidGatewayConfig = errors.MustNewCode("invalid_gateway_config") +) + type Config struct { URL *url.URL `mapstructure:"url"` } @@ -27,7 +32,8 @@ func newConfig() factory.Config { func (c Config) Validate() error { if c.URL == nil { - return errors.New("url is required") + return errors.New(errors.TypeInvalidInput, ErrCodeInvalidGatewayConfig, "url is required") } + return nil } diff --git a/pkg/instrumentation/metric.go b/pkg/instrumentation/metric.go index 63bb0b302cf6..c52249cf15a5 100644 --- a/pkg/instrumentation/metric.go +++ b/pkg/instrumentation/metric.go @@ -2,12 +2,13 @@ package instrumentation import ( "context" - "errors" "fmt" "net" "net/http" "time" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" contribsdkconfig "go.opentelemetry.io/contrib/config" @@ -87,19 +88,19 @@ func prometheusReaderWithCustomRegistry(ctx context.Context, prometheusConfig *c reader, err := otelprom.New(opts...) 
if err != nil { - return nil, fmt.Errorf("error creating otel prometheus exporter: %w", err) + return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "error creating otel prometheus exporter: %s", err.Error()) } lis, err := net.Listen("tcp", addr) if err != nil { return nil, errors.Join( - fmt.Errorf("binding address %s for Prometheus exporter: %w", addr, err), + errors.Newf(errors.TypeInternal, errors.CodeInternal, "binding address %s for Prometheus exporter: %s", addr, err.Error()), reader.Shutdown(ctx), ) } go func() { if err := server.Serve(lis); err != nil && !errors.Is(err, http.ErrServerClosed) { - otel.Handle(fmt.Errorf("the Prometheus HTTP server exited unexpectedly: %w", err)) + otel.Handle(errors.Newf(errors.TypeInternal, errors.CodeInternal, "the Prometheus HTTP server exited unexpectedly: %s", err.Error())) } }() @@ -142,13 +143,13 @@ func meterProviderWithCustomRegistry(ctx context.Context, meterProviderConfig *c // metricReaderWithCustomRegistry creates metric readers with custom Prometheus registry support func metricReaderWithCustomRegistry(ctx context.Context, r contribsdkconfig.MetricReader, customRegistry *prometheus.Registry) (sdkmetric.Reader, error) { if r.Periodic != nil && r.Pull != nil { - return nil, errors.New("must not specify multiple metric reader type") + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "must not specify multiple metric reader type") } if r.Pull != nil { return pullReaderWithCustomRegistry(ctx, r.Pull.Exporter, customRegistry) } - return nil, errors.New("no valid metric reader") + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "no valid metric reader") } // pullReaderWithCustomRegistry creates pull readers with custom Prometheus registry support @@ -156,5 +157,6 @@ func pullReaderWithCustomRegistry(ctx context.Context, exporter contribsdkconfig if exporter.Prometheus != nil { return prometheusReaderWithCustomRegistry(ctx, exporter.Prometheus, 
customRegistry) } - return nil, errors.New("no valid metric exporter") + + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "no valid metric exporter") } diff --git a/pkg/signoz/config.go b/pkg/signoz/config.go index 5eda9f98b0cb..5d568dda8ae9 100644 --- a/pkg/signoz/config.go +++ b/pkg/signoz/config.go @@ -3,6 +3,7 @@ package signoz import ( "context" "fmt" + "log/slog" "net/url" "os" "path" @@ -129,7 +130,7 @@ func (df *DeprecatedFlags) RegisterFlags(cmd *cobra.Command) { _ = cmd.Flags().MarkDeprecated("gateway-url", "use SIGNOZ_GATEWAY_URL instead") } -func NewConfig(ctx context.Context, resolverConfig config.ResolverConfig, deprecatedFlags DeprecatedFlags) (Config, error) { +func NewConfig(ctx context.Context, logger *slog.Logger, resolverConfig config.ResolverConfig, deprecatedFlags DeprecatedFlags) (Config, error) { configFactories := []factory.ConfigFactory{ version.NewConfigFactory(), instrumentation.NewConfigFactory(), @@ -161,7 +162,7 @@ func NewConfig(ctx context.Context, resolverConfig config.ResolverConfig, deprec return Config{}, err } - mergeAndEnsureBackwardCompatibility(&config, deprecatedFlags) + mergeAndEnsureBackwardCompatibility(ctx, logger, &config, deprecatedFlags) if err := validateConfig(config); err != nil { return Config{}, err @@ -186,88 +187,88 @@ func validateConfig(config Config) error { return nil } -func mergeAndEnsureBackwardCompatibility(config *Config, deprecatedFlags DeprecatedFlags) { +func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logger, config *Config, deprecatedFlags DeprecatedFlags) { if os.Getenv("SIGNOZ_LOCAL_DB_PATH") != "" { - fmt.Println("[Deprecated] env SIGNOZ_LOCAL_DB_PATH is deprecated and scheduled for removal. Please use SIGNOZ_SQLSTORE_SQLITE_PATH instead.") + logger.WarnContext(ctx, "[Deprecated] env SIGNOZ_LOCAL_DB_PATH is deprecated and scheduled for removal. 
Please use SIGNOZ_SQLSTORE_SQLITE_PATH instead.") config.SQLStore.Sqlite.Path = os.Getenv("SIGNOZ_LOCAL_DB_PATH") } if os.Getenv("CONTEXT_TIMEOUT") != "" { - fmt.Println("[Deprecated] env CONTEXT_TIMEOUT is deprecated and scheduled for removal. Please use SIGNOZ_APISERVER_TIMEOUT_DEFAULT instead.") + logger.WarnContext(ctx, "[Deprecated] env CONTEXT_TIMEOUT is deprecated and scheduled for removal. Please use SIGNOZ_APISERVER_TIMEOUT_DEFAULT instead.") contextTimeoutDuration, err := time.ParseDuration(os.Getenv("CONTEXT_TIMEOUT") + "s") if err == nil { config.APIServer.Timeout.Default = contextTimeoutDuration } else { - fmt.Println("Error parsing CONTEXT_TIMEOUT, using default value of 60s") + logger.WarnContext(ctx, "Error parsing CONTEXT_TIMEOUT, using default value of 60s") } } if os.Getenv("CONTEXT_TIMEOUT_MAX_ALLOWED") != "" { - fmt.Println("[Deprecated] env CONTEXT_TIMEOUT_MAX_ALLOWED is deprecated and scheduled for removal. Please use SIGNOZ_APISERVER_TIMEOUT_MAX instead.") + logger.WarnContext(ctx, "[Deprecated] env CONTEXT_TIMEOUT_MAX_ALLOWED is deprecated and scheduled for removal. Please use SIGNOZ_APISERVER_TIMEOUT_MAX instead.") contextTimeoutDuration, err := time.ParseDuration(os.Getenv("CONTEXT_TIMEOUT_MAX_ALLOWED") + "s") if err == nil { config.APIServer.Timeout.Max = contextTimeoutDuration } else { - fmt.Println("Error parsing CONTEXT_TIMEOUT_MAX_ALLOWED, using default value of 600s") + logger.WarnContext(ctx, "Error parsing CONTEXT_TIMEOUT_MAX_ALLOWED, using default value of 600s") } } if os.Getenv("STORAGE") != "" { - fmt.Println("[Deprecated] env STORAGE is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_PROVIDER instead.") + logger.WarnContext(ctx, "[Deprecated] env STORAGE is deprecated and scheduled for removal. 
Please use SIGNOZ_TELEMETRYSTORE_PROVIDER instead.") config.TelemetryStore.Provider = os.Getenv("STORAGE") } if os.Getenv("ClickHouseUrl") != "" { - fmt.Println("[Deprecated] env ClickHouseUrl is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN instead.") + logger.WarnContext(ctx, "[Deprecated] env ClickHouseUrl is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN instead.") config.TelemetryStore.Clickhouse.DSN = os.Getenv("ClickHouseUrl") } if deprecatedFlags.MaxIdleConns != 50 { - fmt.Println("[Deprecated] flag --max-idle-conns is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_MAX__IDLE__CONNS instead.") + logger.WarnContext(ctx, "[Deprecated] flag --max-idle-conns is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_MAX__IDLE__CONNS instead.") config.TelemetryStore.Connection.MaxIdleConns = deprecatedFlags.MaxIdleConns } if deprecatedFlags.MaxOpenConns != 100 { - fmt.Println("[Deprecated] flag --max-open-conns is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_MAX__OPEN__CONNS instead.") + logger.WarnContext(ctx, "[Deprecated] flag --max-open-conns is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_MAX__OPEN__CONNS instead.") config.TelemetryStore.Connection.MaxOpenConns = deprecatedFlags.MaxOpenConns } if deprecatedFlags.DialTimeout != 5*time.Second { - fmt.Println("[Deprecated] flag --dial-timeout is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_DIAL__TIMEOUT instead.") + logger.WarnContext(ctx, "[Deprecated] flag --dial-timeout is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_DIAL__TIMEOUT instead.") config.TelemetryStore.Connection.DialTimeout = deprecatedFlags.DialTimeout } if os.Getenv("ALERTMANAGER_API_PREFIX") != "" { - fmt.Println("[Deprecated] env ALERTMANAGER_API_PREFIX is deprecated and scheduled for removal. 
Please use SIGNOZ_ALERTMANAGER_LEGACY_API__URL instead.") + logger.WarnContext(ctx, "[Deprecated] env ALERTMANAGER_API_PREFIX is deprecated and scheduled for removal. Please use SIGNOZ_ALERTMANAGER_LEGACY_API__URL instead.") u, err := url.Parse(os.Getenv("ALERTMANAGER_API_PREFIX")) if err != nil { - fmt.Println("Error parsing ALERTMANAGER_API_PREFIX, using default value") + logger.WarnContext(ctx, "Error parsing ALERTMANAGER_API_PREFIX, using default value") } else { config.Alertmanager.Legacy.ApiURL = u } } if os.Getenv("ALERTMANAGER_API_CHANNEL_PATH") != "" { - fmt.Println("[Deprecated] env ALERTMANAGER_API_CHANNEL_PATH is deprecated and scheduled for complete removal.") + logger.WarnContext(ctx, "[Deprecated] env ALERTMANAGER_API_CHANNEL_PATH is deprecated and scheduled for complete removal.") } if deprecatedFlags.Config != "" { - fmt.Println("[Deprecated] flag --config is deprecated for passing prometheus config. The flag will be used for passing the entire SigNoz config. More details can be found at https://github.com/SigNoz/signoz/issues/6805.") + logger.WarnContext(ctx, "[Deprecated] flag --config is deprecated for passing prometheus config. The flag will be used for passing the entire SigNoz config. More details can be found at https://github.com/SigNoz/signoz/issues/6805.") } if os.Getenv("INVITE_EMAIL_TEMPLATE") != "" { - fmt.Println("[Deprecated] env INVITE_EMAIL_TEMPLATE is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_TEMPLATES_DIRECTORY instead.") + logger.WarnContext(ctx, "[Deprecated] env INVITE_EMAIL_TEMPLATE is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_TEMPLATES_DIRECTORY instead.") config.Emailing.Templates.Directory = path.Dir(os.Getenv("INVITE_EMAIL_TEMPLATE")) } if os.Getenv("SMTP_ENABLED") != "" { - fmt.Println("[Deprecated] env SMTP_ENABLED is deprecated and scheduled for removal. 
Please use SIGNOZ_EMAILING_ENABLED instead.") + logger.WarnContext(ctx, "[Deprecated] env SMTP_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_ENABLED instead.") config.Emailing.Enabled = os.Getenv("SMTP_ENABLED") == "true" } if os.Getenv("SMTP_HOST") != "" { - fmt.Println("[Deprecated] env SMTP_HOST is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_ADDRESS instead.") + logger.WarnContext(ctx, "[Deprecated] env SMTP_HOST is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_ADDRESS instead.") if os.Getenv("SMTP_PORT") != "" { config.Emailing.SMTP.Address = os.Getenv("SMTP_HOST") + ":" + os.Getenv("SMTP_PORT") } else { @@ -276,62 +277,62 @@ func mergeAndEnsureBackwardCompatibility(config *Config, deprecatedFlags Depreca } if os.Getenv("SMTP_PORT") != "" { - fmt.Println("[Deprecated] env SMTP_PORT is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_ADDRESS instead.") + logger.WarnContext(ctx, "[Deprecated] env SMTP_PORT is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_ADDRESS instead.") } if os.Getenv("SMTP_USERNAME") != "" { - fmt.Println("[Deprecated] env SMTP_USERNAME is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_AUTH_USERNAME instead.") + logger.WarnContext(ctx, "[Deprecated] env SMTP_USERNAME is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_AUTH_USERNAME instead.") config.Emailing.SMTP.Auth.Username = os.Getenv("SMTP_USERNAME") } if os.Getenv("SMTP_PASSWORD") != "" { - fmt.Println("[Deprecated] env SMTP_PASSWORD is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_AUTH_PASSWORD instead.") + logger.WarnContext(ctx, "[Deprecated] env SMTP_PASSWORD is deprecated and scheduled for removal. 
Please use SIGNOZ_EMAILING_AUTH_PASSWORD instead.") config.Emailing.SMTP.Auth.Password = os.Getenv("SMTP_PASSWORD") } if os.Getenv("SMTP_FROM") != "" { - fmt.Println("[Deprecated] env SMTP_FROM is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_FROM instead.") + logger.WarnContext(ctx, "[Deprecated] env SMTP_FROM is deprecated and scheduled for removal. Please use SIGNOZ_EMAILING_FROM instead.") config.Emailing.SMTP.From = os.Getenv("SMTP_FROM") } if os.Getenv("SIGNOZ_SAAS_SEGMENT_KEY") != "" { - fmt.Println("[Deprecated] env SIGNOZ_SAAS_SEGMENT_KEY is deprecated and scheduled for removal. Please use SIGNOZ_ANALYTICS_SEGMENT_KEY instead.") + logger.WarnContext(ctx, "[Deprecated] env SIGNOZ_SAAS_SEGMENT_KEY is deprecated and scheduled for removal. Please use SIGNOZ_ANALYTICS_SEGMENT_KEY instead.") config.Analytics.Segment.Key = os.Getenv("SIGNOZ_SAAS_SEGMENT_KEY") } if os.Getenv("TELEMETRY_ENABLED") != "" { - fmt.Println("[Deprecated] env TELEMETRY_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_ANALYTICS_ENABLED instead.") + logger.WarnContext(ctx, "[Deprecated] env TELEMETRY_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_ANALYTICS_ENABLED instead.") config.Analytics.Enabled = os.Getenv("TELEMETRY_ENABLED") == "true" } if deprecatedFlags.FluxInterval != "" { - fmt.Println("[Deprecated] flag --flux-interval is deprecated and scheduled for removal. Please use SIGNOZ_QUERIER_FLUX__INTERVAL instead.") + logger.WarnContext(ctx, "[Deprecated] flag --flux-interval is deprecated and scheduled for removal. 
Please use SIGNOZ_QUERIER_FLUX__INTERVAL instead.") fluxInterval, err := time.ParseDuration(deprecatedFlags.FluxInterval) if err != nil { - fmt.Println("Error parsing --flux-interval, using default value.") + logger.WarnContext(ctx, "Error parsing --flux-interval, using default value.") } else { config.Querier.FluxInterval = fluxInterval } } if deprecatedFlags.FluxIntervalForTraceDetail != "" { - fmt.Println("[Deprecated] flag --flux-interval-for-trace-detail is deprecated and scheduled for complete removal. Please use SIGNOZ_QUERIER_FLUX__INTERVAL instead.") + logger.WarnContext(ctx, "[Deprecated] flag --flux-interval-for-trace-detail is deprecated and scheduled for complete removal. Please use SIGNOZ_QUERIER_FLUX__INTERVAL instead.") } if deprecatedFlags.Cluster != "" { - fmt.Println("[Deprecated] flag --cluster is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER instead.") + logger.WarnContext(ctx, "[Deprecated] flag --cluster is deprecated and scheduled for removal. Please use SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER instead.") config.TelemetryStore.Clickhouse.Cluster = deprecatedFlags.Cluster } if deprecatedFlags.PreferSpanMetrics { - fmt.Println("[Deprecated] flag --prefer-span-metrics is deprecated and scheduled for removal. Please use USE_SPAN_METRICS instead.") + logger.WarnContext(ctx, "[Deprecated] flag --prefer-span-metrics is deprecated and scheduled for removal. Please use USE_SPAN_METRICS instead.") } if deprecatedFlags.GatewayUrl != "" { - fmt.Println("[Deprecated] flag --gateway-url is deprecated and scheduled for removal. Please use SIGNOZ_GATEWAY_URL instead.") + logger.WarnContext(ctx, "[Deprecated] flag --gateway-url is deprecated and scheduled for removal. 
Please use SIGNOZ_GATEWAY_URL instead.") u, err := url.Parse(deprecatedFlags.GatewayUrl) if err != nil { - fmt.Println("Error parsing --gateway-url, using default value.") + logger.WarnContext(ctx, "Error parsing --gateway-url, using default value.") } else { config.Gateway.URL = u } diff --git a/pkg/signoz/config_test.go b/pkg/signoz/config_test.go index 07aa01ae2405..9aca568e2697 100644 --- a/pkg/signoz/config_test.go +++ b/pkg/signoz/config_test.go @@ -2,6 +2,8 @@ package signoz import ( "context" + "io" + "log/slog" "testing" "github.com/SigNoz/signoz/pkg/config/configtest" @@ -11,6 +13,7 @@ import ( // This is a test to ensure that all fields of config implement the factory.Config interface and are valid with // their default values. func TestValidateConfig(t *testing.T) { - _, err := NewConfig(context.Background(), configtest.NewResolverConfig(), DeprecatedFlags{}) + logger := slog.New(slog.NewTextHandler(io.Discard, nil)) + _, err := NewConfig(context.Background(), logger, configtest.NewResolverConfig(), DeprecatedFlags{}) assert.NoError(t, err) } diff --git a/pkg/smtp/client/auth.go b/pkg/smtp/client/auth.go index 6385f9c13580..de00e60b4d60 100644 --- a/pkg/smtp/client/auth.go +++ b/pkg/smtp/client/auth.go @@ -1,9 +1,10 @@ package client import ( - "errors" "net/smtp" "strings" + + "github.com/SigNoz/signoz/pkg/errors" ) type loginAuth struct { @@ -27,7 +28,7 @@ func (auth *loginAuth) Next(fromServer []byte, more bool) ([]byte, error) { case "password:": return []byte(auth.password), nil default: - return nil, errors.New("unexpected server challenge") + return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "unexpected server challenge") } } return nil, nil diff --git a/pkg/smtp/client/smtp.go b/pkg/smtp/client/smtp.go index e3c07b4d673f..a97b5bb7c2c8 100644 --- a/pkg/smtp/client/smtp.go +++ b/pkg/smtp/client/smtp.go @@ -5,7 +5,6 @@ import ( "context" "crypto/tls" "crypto/x509" - "errors" "fmt" "log/slog" "math/rand" @@ -20,6 +19,8 @@ 
import ( "strings" "sync" "time" + + "github.com/SigNoz/signoz/pkg/errors" ) type Client struct { @@ -52,12 +53,12 @@ func New(address string, logger *slog.Logger, opts ...Option) (*Client, error) { from, err := mail.ParseAddress(clientOpts.from) if err != nil { - return nil, fmt.Errorf("parse 'from' address: %w", err) + return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "parse 'from' address: %s", err.Error()) } host, port, err := net.SplitHostPort(address) if err != nil { - return nil, fmt.Errorf("parse 'address': %w", err) + return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "parse 'address': %s", err.Error()) } if clientOpts.headers == nil { @@ -67,7 +68,7 @@ func New(address string, logger *slog.Logger, opts ...Option) (*Client, error) { tls, err := newTLSConfig(clientOpts.tls, host) if err != nil { - return nil, fmt.Errorf("create TLS config: %w", err) + return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "create TLS config: %s", err.Error()) } return &Client{ @@ -102,7 +103,7 @@ func (c *Client) Do(ctx context.Context, tos []*mail.Address, subject string, co smtpClient, err = smtp.NewClient(conn, c.host) if err != nil { conn.Close() - return fmt.Errorf("failed to create SMTP client: %w", err) + return errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to create SMTP client: %s", err.Error()) } // Try to clean up after ourselves but don't log anything if something has failed. 
@@ -275,7 +276,7 @@ func (c *Client) smtpAuth(_ context.Context, mechs string) (smtp.Auth, error) { case "CRAM-MD5": secret := c.auth.Secret if secret == "" { - errs = append(errs, errors.New("missing secret for CRAM-MD5 auth mechanism")) + errs = append(errs, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "missing secret for CRAM-MD5 auth mechanism")) continue } return smtp.CRAMMD5Auth(username, secret), nil @@ -283,7 +284,7 @@ func (c *Client) smtpAuth(_ context.Context, mechs string) (smtp.Auth, error) { case "PLAIN": password := c.auth.Password if password == "" { - errs = append(errs, errors.New("missing password for PLAIN auth mechanism")) + errs = append(errs, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "missing password for PLAIN auth mechanism")) continue } identity := c.auth.Identity @@ -292,7 +293,7 @@ func (c *Client) smtpAuth(_ context.Context, mechs string) (smtp.Auth, error) { case "LOGIN": password := c.auth.Password if password == "" { - errs = append(errs, errors.New("missing password for LOGIN auth mechanism")) + errs = append(errs, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "missing password for LOGIN auth mechanism")) continue } @@ -301,7 +302,7 @@ func (c *Client) smtpAuth(_ context.Context, mechs string) (smtp.Auth, error) { } if len(errs) == 0 { - errs = append(errs, errors.New("unknown auth mechanism: "+mechs)) + errs = append(errs, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "unknown auth mechanism: "+mechs)) } return nil, errors.Join(errs...) 
diff --git a/pkg/sqlmigration/013_update_organization.go b/pkg/sqlmigration/013_update_organization.go index 42a0e0dddcd1..19a1962de0fd 100644 --- a/pkg/sqlmigration/013_update_organization.go +++ b/pkg/sqlmigration/013_update_organization.go @@ -3,7 +3,8 @@ package sqlmigration import ( "context" "database/sql" - "errors" + + "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/sqlstore" diff --git a/pkg/sqlmigration/014_add_alertmanager.go b/pkg/sqlmigration/014_add_alertmanager.go index d45252914683..5696f54b4785 100644 --- a/pkg/sqlmigration/014_add_alertmanager.go +++ b/pkg/sqlmigration/014_add_alertmanager.go @@ -70,7 +70,7 @@ func (migration *addAlertmanager) Up(ctx context.Context, db *bun.DB) error { NewAddColumn(). Table("notification_channels"). ColumnExpr("org_id TEXT REFERENCES organizations(id) ON DELETE CASCADE"). - Exec(ctx); err != nil && err != ErrNoExecute { + Exec(ctx); err != nil { return err } } diff --git a/pkg/sqlmigration/046_update_dashboard_alert_and_saved_view_v5.go b/pkg/sqlmigration/046_update_dashboard_alert_and_saved_view_v5.go index 3b88c5c16e72..8c24819e8069 100644 --- a/pkg/sqlmigration/046_update_dashboard_alert_and_saved_view_v5.go +++ b/pkg/sqlmigration/046_update_dashboard_alert_and_saved_view_v5.go @@ -279,7 +279,6 @@ func (migration *queryBuilderV5Migration) migrateRules( updated := alertsMigrator.Migrate(ctx, rule.Data) if updated { - fmt.Println("updated rule", rule.ID) dataJSON, err := json.Marshal(rule.Data) if err != nil { return err diff --git a/pkg/sqlmigration/sqlmigration.go b/pkg/sqlmigration/sqlmigration.go index 3365c50ae15f..a3948405f428 100644 --- a/pkg/sqlmigration/sqlmigration.go +++ b/pkg/sqlmigration/sqlmigration.go @@ -2,7 +2,6 @@ package sqlmigration import ( "context" - "errors" "github.com/SigNoz/signoz/pkg/factory" "github.com/uptrace/bun" @@ -22,10 +21,6 @@ type SQLMigration interface { Down(context.Context, *bun.DB) error } -var ( - ErrNoExecute = 
errors.New("no execute") -) - var ( OrgReference = "org" UserReference = "user" diff --git a/pkg/sqlmigrator/config.go b/pkg/sqlmigrator/config.go index 1ea77ce573b2..d81c5b4ce1ba 100644 --- a/pkg/sqlmigrator/config.go +++ b/pkg/sqlmigrator/config.go @@ -1,12 +1,17 @@ package sqlmigrator import ( - "errors" "time" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" ) +var ( + ErrCodeInvalidSQLMigratorConfig = errors.MustNewCode("invalid_sqlmigrator_config") +) + type Config struct { // Lock is the lock configuration. Lock Lock `mapstructure:"lock"` @@ -34,7 +39,7 @@ func newConfig() factory.Config { func (c Config) Validate() error { if c.Lock.Timeout <= c.Lock.Interval { - return errors.New("lock::timeout must be greater than lock::interval") + return errors.New(errors.TypeInvalidInput, ErrCodeInvalidSQLMigratorConfig, "lock::timeout must be greater than lock::interval") } return nil diff --git a/pkg/sqlmigrator/migrator.go b/pkg/sqlmigrator/migrator.go index 646b895ae220..8cf6c79013ff 100644 --- a/pkg/sqlmigrator/migrator.go +++ b/pkg/sqlmigrator/migrator.go @@ -2,9 +2,10 @@ package sqlmigrator import ( "context" - "errors" "time" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/uptrace/bun/migrate" @@ -100,7 +101,7 @@ func (migrator *migrator) Lock(ctx context.Context) error { for { select { case <-timer.C: - err := errors.New("timed out waiting for lock") + err := errors.New(errors.TypeTimeout, errors.CodeTimeout, "timed out waiting for lock") migrator.settings.Logger().ErrorContext(ctx, "cannot acquire lock", "error", err, "lock_timeout", migrator.config.Lock.Timeout.String(), "dialect", migrator.dialect) return err case <-ticker.C: diff --git a/pkg/sqlschema/sqlitesqlschema/ddl.go b/pkg/sqlschema/sqlitesqlschema/ddl.go index 5f1afd8e1b24..d07864797a8c 100644 --- a/pkg/sqlschema/sqlitesqlschema/ddl.go +++ b/pkg/sqlschema/sqlitesqlschema/ddl.go @@ 
-1,17 +1,19 @@ package sqlitesqlschema import ( - "errors" "fmt" "regexp" "strings" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/sqlschema" ) // Inspired by https://github.com/go-gorm/sqlite var ( + ErrCodeInvalidDDL = errors.MustNewCode("invalid_ddl") sqliteSeparator = "`|\"|'|\t" sqliteIdentQuote = "`|\"|'" uniqueRegexp = regexp.MustCompile(fmt.Sprintf(`^(?:CONSTRAINT [%v]?[\w-]+[%v]? )?UNIQUE (.*)$`, sqliteSeparator, sqliteSeparator)) @@ -40,12 +42,12 @@ const ( func parseCreateTable(str string, fmter sqlschema.SQLFormatter) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) { sections := tableRegexp.FindStringSubmatch(str) if len(sections) == 0 { - return nil, nil, errors.New("invalid DDL") + return nil, nil, errors.New(errors.TypeInternal, ErrCodeInvalidDDL, "invalid DDL") } tableNameSections := tableNameRegexp.FindStringSubmatch(str) if len(tableNameSections) == 0 { - return nil, nil, errors.New("invalid DDL") + return nil, nil, errors.New(errors.TypeInternal, ErrCodeInvalidDDL, "invalid DDL") } tableName := sqlschema.TableName(tableNameSections[1]) @@ -97,14 +99,14 @@ func parseCreateTable(str string, fmter sqlschema.SQLFormatter) (*sqlschema.Tabl } if bracketLevel < 0 { - return nil, nil, errors.New("invalid DDL, unbalanced brackets") + return nil, nil, errors.New(errors.TypeInternal, ErrCodeInvalidDDL, "invalid DDL, unbalanced brackets") } buf += string(c) } if bracketLevel != 0 { - return nil, nil, errors.New("invalid DDL, unbalanced brackets") + return nil, nil, errors.New(errors.TypeInternal, ErrCodeInvalidDDL, "invalid DDL, unbalanced brackets") } if buf != "" { @@ -300,14 +302,14 @@ func parseAllColumns(in string) ([]sqlschema.ColumnName, error) { state = parseAllColumnsState_State_End continue } - return nil, fmt.Errorf("unexpected token: %s", string(s[i])) + return nil, errors.Newf(errors.TypeInternal, ErrCodeInvalidDDL, "unexpected token: %s", string(s[i])) case parseAllColumnsState_State_End: // break is automatic 
in Go switch statements } } if state != parseAllColumnsState_State_End { - return nil, errors.New("unexpected end") + return nil, errors.New(errors.TypeInternal, ErrCodeInvalidDDL, "unexpected end") } return columns, nil diff --git a/pkg/telemetrystore/telemetrystorehook/logging.go b/pkg/telemetrystore/telemetrystorehook/logging.go index fb11e259d635..c357aab1441e 100644 --- a/pkg/telemetrystore/telemetrystorehook/logging.go +++ b/pkg/telemetrystore/telemetrystorehook/logging.go @@ -3,10 +3,11 @@ package telemetrystorehook import ( "context" "database/sql" - "errors" "log/slog" "time" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/telemetrystore" ) diff --git a/pkg/types/authtypes/uuid.go b/pkg/types/authtypes/uuid.go index c387a8cd2e90..a3b61092d591 100644 --- a/pkg/types/authtypes/uuid.go +++ b/pkg/types/authtypes/uuid.go @@ -2,7 +2,8 @@ package authtypes import ( "context" - "errors" + + "github.com/SigNoz/signoz/pkg/errors" ) type uuidKey struct{} @@ -24,7 +25,7 @@ func (u *UUID) ContextFromRequest(ctx context.Context, values ...string) (contex } if value == "" { - return ctx, errors.New("missing Authorization header") + return ctx, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "missing Authorization header") } return NewContextWithUUID(ctx, value), nil diff --git a/pkg/types/pipelinetypes/time_parser.go b/pkg/types/pipelinetypes/time_parser.go index 35ab69a4085d..eaa80c1db25c 100644 --- a/pkg/types/pipelinetypes/time_parser.go +++ b/pkg/types/pipelinetypes/time_parser.go @@ -1,10 +1,11 @@ package pipelinetypes import ( - "errors" "fmt" "regexp" "strings" + + "github.com/SigNoz/signoz/pkg/errors" ) // Regex for strptime format placeholders supported by the time parser. 
@@ -106,7 +107,7 @@ func RegexForStrptimeLayout(layout string) (string, error) { if regex, ok := ctimeRegex[directive]; ok { return regex } - errs = append(errs, errors.New("unsupported ctimefmt directive: "+directive)) + errs = append(errs, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported ctimefmt directive: "+directive)) return "" } diff --git a/pkg/types/ruletypes/recurrence.go b/pkg/types/ruletypes/recurrence.go index e1f37c190d41..1c604be62b8b 100644 --- a/pkg/types/ruletypes/recurrence.go +++ b/pkg/types/ruletypes/recurrence.go @@ -3,8 +3,9 @@ package ruletypes import ( "database/sql/driver" "encoding/json" - "errors" "time" + + "github.com/SigNoz/signoz/pkg/errors" ) type RepeatType string @@ -61,7 +62,7 @@ func (d *Duration) UnmarshalJSON(b []byte) error { return nil default: - return errors.New("invalid duration") + return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration") } } diff --git a/pkg/types/ruletypes/templates.go b/pkg/types/ruletypes/templates.go index eaa433d21c7a..e698986eed01 100644 --- a/pkg/types/ruletypes/templates.go +++ b/pkg/types/ruletypes/templates.go @@ -3,7 +3,6 @@ package ruletypes import ( "bytes" "context" - "errors" "fmt" "math" "net/url" @@ -11,6 +10,8 @@ import ( "sort" "strings" + "github.com/SigNoz/signoz/pkg/errors" + html_template "html/template" text_template "text/template" @@ -73,7 +74,7 @@ func NewTemplateExpander( if len(v) > 0 { return v[0], nil } - return nil, errors.New("first() called on vector with no elements") + return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "first() called on vector with no elements") }, "label": func(label string, s *tmplQueryRecord) string { return s.Labels[label] diff --git a/pkg/types/ssotypes/google.go b/pkg/types/ssotypes/google.go index 623c5c3d4b50..fec3d1b7cb29 100644 --- a/pkg/types/ssotypes/google.go +++ b/pkg/types/ssotypes/google.go @@ -2,10 +2,10 @@ package ssotypes import ( "context" - "errors" - 
"fmt" "net/http" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/coreos/go-oidc/v3/oidc" "golang.org/x/oauth2" ) @@ -48,7 +48,7 @@ func (g *GoogleOAuthProvider) HandleCallback(r *http.Request) (identity *SSOIden token, err := g.OAuth2Config.Exchange(r.Context(), q.Get("code")) if err != nil { - return identity, fmt.Errorf("google: failed to get token: %v", err) + return identity, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to get token: %v", err) } return g.createIdentity(r.Context(), token) @@ -57,11 +57,11 @@ func (g *GoogleOAuthProvider) HandleCallback(r *http.Request) (identity *SSOIden func (g *GoogleOAuthProvider) createIdentity(ctx context.Context, token *oauth2.Token) (identity *SSOIdentity, err error) { rawIDToken, ok := token.Extra("id_token").(string) if !ok { - return identity, errors.New("google: no id_token in token response") + return identity, errors.New(errors.TypeInternal, errors.CodeInternal, "google: no id_token in token response") } idToken, err := g.Verifier.Verify(ctx, rawIDToken) if err != nil { - return identity, fmt.Errorf("google: failed to verify ID Token: %v", err) + return identity, errors.Newf(errors.TypeInternal, errors.CodeInternal, "google: failed to verify ID Token: %v", err) } var claims struct { @@ -71,11 +71,11 @@ func (g *GoogleOAuthProvider) createIdentity(ctx context.Context, token *oauth2. 
HostedDomain string `json:"hd"` } if err := idToken.Claims(&claims); err != nil { - return identity, fmt.Errorf("oidc: failed to decode claims: %v", err) + return identity, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: failed to decode claims: %v", err) } if claims.HostedDomain != g.HostedDomain { - return identity, fmt.Errorf("oidc: unexpected hd claim %v", claims.HostedDomain) + return identity, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: unexpected hd claim %v", claims.HostedDomain) } identity = &SSOIdentity{ diff --git a/pkg/valuer/string.go b/pkg/valuer/string.go index ce2b5f7ea46a..687cb19cab5e 100644 --- a/pkg/valuer/string.go +++ b/pkg/valuer/string.go @@ -3,9 +3,10 @@ package valuer import ( "database/sql/driver" "encoding/json" - "fmt" "reflect" "strings" + + "github.com/SigNoz/signoz/pkg/errors" ) var _ Valuer = (*String)(nil) @@ -50,7 +51,7 @@ func (enum String) Value() (driver.Value, error) { func (enum *String) Scan(val interface{}) error { if enum == nil { - return fmt.Errorf("string: (nil \"%s\")", reflect.TypeOf(enum).String()) + return errors.Newf(errors.TypeInternal, ErrCodeUnknownValuerScan, "string: (nil \"%s\")", reflect.TypeOf(enum).String()) } if val == nil { @@ -61,7 +62,7 @@ func (enum *String) Scan(val interface{}) error { str, ok := val.(string) if !ok { - return fmt.Errorf("string: (non-string \"%s\")", reflect.TypeOf(val).String()) + return errors.Newf(errors.TypeInternal, ErrCodeUnknownValuerScan, "string: (non-string \"%s\")", reflect.TypeOf(val).String()) } *enum = NewString(str) diff --git a/pkg/valuer/uuid.go b/pkg/valuer/uuid.go index 04e9ec603759..d892bae66c69 100644 --- a/pkg/valuer/uuid.go +++ b/pkg/valuer/uuid.go @@ -3,9 +3,9 @@ package valuer import ( "database/sql/driver" "encoding/json" - "fmt" "reflect" + "github.com/SigNoz/signoz/pkg/errors" "github.com/google/uuid" ) @@ -98,11 +98,11 @@ func (enum UUID) Value() (driver.Value, error) { func (enum *UUID) Scan(val interface{}) error { if 
enum == nil { - return fmt.Errorf("uuid: (nil \"%s\")", reflect.TypeOf(enum).String()) + return errors.Newf(errors.TypeInternal, ErrCodeUnknownValuerScan, "uuid: (nil \"%s\")", reflect.TypeOf(enum).String()) } if val == nil { - return fmt.Errorf("uuid: (nil \"%s\")", reflect.TypeOf(val).String()) + return errors.Newf(errors.TypeInternal, ErrCodeUnknownValuerScan, "uuid: (nil \"%s\")", reflect.TypeOf(val).String()) } var enumVal UUID @@ -110,17 +110,17 @@ func (enum *UUID) Scan(val interface{}) error { case string: _enumVal, err := NewUUID(val) if err != nil { - return fmt.Errorf("uuid: (invalid-uuid \"%s\")", err.Error()) + return errors.Newf(errors.TypeInternal, ErrCodeUnknownValuerScan, "uuid: (invalid-uuid \"%s\")", err.Error()) } enumVal = _enumVal case []byte: _enumVal, err := NewUUIDFromBytes(val) if err != nil { - return fmt.Errorf("uuid: (invalid-uuid \"%s\")", err.Error()) + return errors.Newf(errors.TypeInternal, ErrCodeUnknownValuerScan, "uuid: (invalid-uuid \"%s\")", err.Error()) } enumVal = _enumVal default: - return fmt.Errorf("uuid: (non-uuid \"%s\")", reflect.TypeOf(val).String()) + return errors.Newf(errors.TypeInternal, ErrCodeUnknownValuerScan, "uuid: (non-uuid \"%s\")", reflect.TypeOf(val).String()) } *enum = enumVal diff --git a/pkg/valuer/valuer.go b/pkg/valuer/valuer.go index 7e65ac285746..138bfb426396 100644 --- a/pkg/valuer/valuer.go +++ b/pkg/valuer/valuer.go @@ -6,6 +6,12 @@ import ( "encoding" "encoding/json" "fmt" + + "github.com/SigNoz/signoz/pkg/errors" +) + +var ( + ErrCodeUnknownValuerScan = errors.MustNewCode("unknown_valuer_scan") ) type Valuer interface { diff --git a/pkg/version/version.go b/pkg/version/version.go index 8506468224da..1bfebe24979d 100644 --- a/pkg/version/version.go +++ b/pkg/version/version.go @@ -102,10 +102,10 @@ func (b Build) PrettyPrint(cfg Config) { " :**********= ", } - fmt.Println() + fmt.Println() //nolint:forbidigo for _, line := range ascii { - fmt.Print(line) - fmt.Println() + fmt.Print(line) 
//nolint:forbidigo + fmt.Println() //nolint:forbidigo } - fmt.Println() + fmt.Println() //nolint:forbidigo } From 6c7275d355647cc4817e35bb5a9a4210f2804ce5 Mon Sep 17 00:00:00 2001 From: Tushar Vats Date: Tue, 9 Sep 2025 17:04:40 +0530 Subject: [PATCH 05/51] Feat: API for exporting raw data (#8936) This introduces a new Raw Data Export module to the codebase, enabling users to export raw log data via a dedicated API endpoint. The changes include the implementation of the module and handler, integration with existing infrastructure, configuration updates, and adjustments to tests and module wiring. --- pkg/apiserver/config.go | 1 + .../implrawdataexport/constants.go | 19 + .../implrawdataexport/handler.go | 516 ++++++++++++++++ .../implrawdataexport/handler_test.go | 563 ++++++++++++++++++ .../rawdataexport/implrawdataexport/module.go | 89 +++ pkg/modules/rawdataexport/rawdataexport.go | 17 + .../app/cloudintegrations/controller_test.go | 8 +- pkg/query-service/app/http_handler.go | 3 + .../app/integrations/manager_test.go | 2 +- .../integration/filter_suggestions_test.go | 2 +- .../integration/logparsingpipeline_test.go | 2 +- .../signoz_cloud_integrations_test.go | 2 +- .../integration/signoz_integrations_test.go | 2 +- pkg/signoz/handler.go | 36 +- pkg/signoz/handler_test.go | 2 +- pkg/signoz/module.go | 42 +- pkg/signoz/module_test.go | 2 +- pkg/signoz/signoz.go | 2 +- pkg/telemetrylogs/const.go | 51 +- pkg/telemetrylogs/statement_builder.go | 37 +- pkg/types/ctxtypes/clickhouse.go | 14 + .../querybuildertypesv5/builder_elements.go | 7 + 22 files changed, 1368 insertions(+), 51 deletions(-) create mode 100644 pkg/modules/rawdataexport/implrawdataexport/constants.go create mode 100644 pkg/modules/rawdataexport/implrawdataexport/handler.go create mode 100644 pkg/modules/rawdataexport/implrawdataexport/handler_test.go create mode 100644 pkg/modules/rawdataexport/implrawdataexport/module.go create mode 100644 pkg/modules/rawdataexport/rawdataexport.go create mode 
100644 pkg/types/ctxtypes/clickhouse.go diff --git a/pkg/apiserver/config.go b/pkg/apiserver/config.go index e87d80924305..6619f0847bc3 100644 --- a/pkg/apiserver/config.go +++ b/pkg/apiserver/config.go @@ -38,6 +38,7 @@ func newConfig() factory.Config { ExcludedRoutes: []string{ "/api/v1/logs/tail", "/api/v3/logs/livetail", + "/api/v1/export_raw_data", }, }, Logging: Logging{ diff --git a/pkg/modules/rawdataexport/implrawdataexport/constants.go b/pkg/modules/rawdataexport/implrawdataexport/constants.go new file mode 100644 index 000000000000..7987d735578b --- /dev/null +++ b/pkg/modules/rawdataexport/implrawdataexport/constants.go @@ -0,0 +1,19 @@ +package implrawdataexport + +import ( + "time" +) + +const ( + // Row Limits + MaxExportRowCountLimit = 50_000 // 50k + DefaultExportRowCountLimit = 10_000 // 10k + + // Data Limits + MaxExportBytesLimit = 10 * 1024 * 1024 * 1024 // 10 GB + + // Query Limits + ChunkSize = 5_000 // 5k + ClickhouseExportRawDataMaxThreads = 2 + ClickhouseExportRawDataTimeout = 10 * time.Minute +) diff --git a/pkg/modules/rawdataexport/implrawdataexport/handler.go b/pkg/modules/rawdataexport/implrawdataexport/handler.go new file mode 100644 index 000000000000..a9e15845b252 --- /dev/null +++ b/pkg/modules/rawdataexport/implrawdataexport/handler.go @@ -0,0 +1,516 @@ +package implrawdataexport + +import ( + "encoding/csv" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + "time" + "unicode" + "unicode/utf8" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/http/render" + "github.com/SigNoz/signoz/pkg/modules/rawdataexport" + "github.com/SigNoz/signoz/pkg/telemetrylogs" + "github.com/SigNoz/signoz/pkg/types/authtypes" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type handler struct { + module rawdataexport.Module +} + +func NewHandler(module 
rawdataexport.Module) rawdataexport.Handler { + return &handler{module: module} +} + +// ExportRawData handles data export requests. +// +// API Documentation: +// Endpoint: GET /api/v1/export_raw_data +// +// Query Parameters: +// +// - source (optional): Type of data to export ["logs" (default), "metrics", "traces"] +// Note: Currently only "logs" is fully supported +// +// - format (optional): Output format ["csv" (default), "jsonl"] +// +// - start (required): Start time for query (Unix timestamp in nanoseconds) +// +// - end (required): End time for query (Unix timestamp in nanoseconds) +// +// - limit (optional): Maximum number of rows to export +// Constraints: Must be positive and cannot exceed MAX_EXPORT_ROW_COUNT_LIMIT +// +// - filter (optional): Filter expression to apply to the query +// +// - columns (optional): Specific columns to include in export +// Default: all columns are returned +// Format: ["context.field:type", "context.field", "field"] +// +// - order_by (optional): Sorting specification ["column:direction" or "context.field:type:direction"] +// Direction: "asc" or "desc" +// Default: ["timestamp:desc", "id:desc"] +// +// Response Headers: +// - Content-Type: "text/csv" or "application/x-ndjson" +// - Content-Encoding: "gzip" (handled by HTTP middleware) +// - Content-Disposition: "attachment; filename=\"data_exported.[format]\"" +// - Cache-Control: "no-cache" +// - Vary: "Accept-Encoding" +// - Transfer-Encoding: "chunked" +// - Trailers: X-Response-Complete +// +// Response Format: +// +// CSV: Headers in first row, data in subsequent rows +// JSONL: One JSON object per line +// +// Example Usage: +// +// Basic CSV export: +// GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000 +// +// Export with columns and format: +// GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000&format=jsonl +// &columns=timestamp&columns=severity&columns=message +// +// Export with filter and ordering: +// 
GET /api/v1/export_raw_data?start=1693612800000000000&end=1693699199000000000 +// &filter=severity="error"&order_by=timestamp:desc&limit=1000 +func (handler *handler) ExportRawData(rw http.ResponseWriter, r *http.Request) { + source, err := getExportQuerySource(r.URL.Query()) + if err != nil { + render.Error(rw, err) + return + } + + switch source { + case "logs": + handler.exportLogs(rw, r) + case "traces": + handler.exportTraces(rw, r) + case "metrics": + handler.exportMetrics(rw, r) + default: + render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs")) + } +} + +func (handler *handler) exportMetrics(rw http.ResponseWriter, r *http.Request) { + render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "metrics export is not yet supported")) +} + +func (handler *handler) exportTraces(rw http.ResponseWriter, r *http.Request) { + render.Error(rw, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "traces export is not yet supported")) +} + +func (handler *handler) exportLogs(rw http.ResponseWriter, r *http.Request) { + // Set up response headers + rw.Header().Set("Cache-Control", "no-cache") + rw.Header().Set("Vary", "Accept-Encoding") // Indicate that response varies based on Accept-Encoding + rw.Header().Set("Access-Control-Expose-Headers", "Content-Disposition, X-Response-Complete") + rw.Header().Set("Trailer", "X-Response-Complete") + rw.Header().Set("Transfer-Encoding", "chunked") + + queryParams := r.URL.Query() + + startTime, endTime, err := getExportQueryTimeRange(queryParams) + if err != nil { + render.Error(rw, err) + return + } + + limit, err := getExportQueryLimit(queryParams) + if err != nil { + render.Error(rw, err) + return + } + + format, err := getExportQueryFormat(queryParams) + if err != nil { + render.Error(rw, err) + return + } + + // Set appropriate content type and filename + filename := fmt.Sprintf("data_exported_%s.%s", time.Now().Format("2006-01-02_150405"), format) + 
rw.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename)) + + filterExpression := queryParams.Get("filter") + + orderByExpression, err := getExportQueryOrderBy(queryParams) + if err != nil { + render.Error(rw, err) + return + } + + columns := getExportQueryColumns(queryParams) + + claims, err := authtypes.ClaimsFromContext(r.Context()) + if err != nil { + render.Error(rw, err) + return + } + + orgID, err := valuer.NewUUID(claims.OrgID) + if err != nil { + render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is invalid")) + return + } + + queryRangeRequest := qbtypes.QueryRangeRequest{ + Start: startTime, + End: endTime, + RequestType: qbtypes.RequestTypeRaw, + CompositeQuery: qbtypes.CompositeQuery{ + Queries: []qbtypes.QueryEnvelope{ + { + Type: qbtypes.QueryTypeBuilder, + Spec: nil, + }, + }, + }, + } + + spec := qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{ + Signal: telemetrytypes.SignalLogs, + Name: "raw", + Filter: &qbtypes.Filter{ + Expression: filterExpression, + }, + Limit: limit, + Order: orderByExpression, + } + + spec.SelectFields = columns + + queryRangeRequest.CompositeQuery.Queries[0].Spec = spec + + // This will signal Export module to stop sending data + doneChan := make(chan any) + defer close(doneChan) + rowChan, errChan := handler.module.ExportRawData(r.Context(), orgID, &queryRangeRequest, doneChan) + + var isComplete bool + + switch format { + case "csv", "": + rw.Header().Set("Content-Type", "text/csv") + csvWriter := csv.NewWriter(rw) + isComplete, err = handler.exportLogsCSV(rowChan, errChan, csvWriter) + if err != nil { + render.Error(rw, err) + return + } + csvWriter.Flush() + case "jsonl": + rw.Header().Set("Content-Type", "application/x-ndjson") + isComplete, err = handler.exportLogsJSONL(rowChan, errChan, rw) + if err != nil { + render.Error(rw, err) + return + } + default: + render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must 
be csv or jsonl")) + return + } + + rw.Header().Set("X-Response-Complete", strconv.FormatBool(isComplete)) +} + +func (handler *handler) exportLogsCSV(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, csvWriter *csv.Writer) (bool, error) { + var header []string + + headerToIndexMapping := make(map[string]int, len(header)) + + totalBytes := uint64(0) + for { + select { + case row, ok := <-rowChan: + if !ok { + return true, nil + } + if header == nil { + // Initialize and write header for CSV + header = constructCSVHeaderFromQueryResponse(row.Data) + + if err := csvWriter.Write(header); err != nil { + return false, err + } + + for i, col := range header { + headerToIndexMapping[col] = i + } + } + record := constructCSVRecordFromQueryResponse(row.Data, headerToIndexMapping) + if err := csvWriter.Write(record); err != nil { + return false, err + } + + totalBytes += getsizeOfStringSlice(record) + if totalBytes > MaxExportBytesLimit { + return false, nil + } + case err := <-errChan: + if err != nil { + return false, err + } + } + } +} + +func (handler *handler) exportLogsJSONL(rowChan <-chan *qbtypes.RawRow, errChan <-chan error, writer io.Writer) (bool, error) { + + totalBytes := uint64(0) + for { + select { + case row, ok := <-rowChan: + if !ok { + return true, nil + } + // Handle JSON format (JSONL - one object per line) + jsonBytes, _ := json.Marshal(row.Data) + totalBytes += uint64(len(jsonBytes)) + 1 // +1 for newline + + if _, err := writer.Write(jsonBytes); err != nil { + return false, errors.NewUnexpectedf(errors.CodeInternal, "error writing JSON: %s", err) + } + if _, err := writer.Write([]byte("\n")); err != nil { + return false, errors.NewUnexpectedf(errors.CodeInternal, "error writing JSON newline: %s", err) + } + + if totalBytes > MaxExportBytesLimit { + return false, nil + } + case err := <-errChan: + if err != nil { + return false, err + } + } + } +} + +func getExportQuerySource(queryParams url.Values) (string, error) { + switch 
queryParams.Get("source") { + case "logs", "": + return "logs", nil + case "metrics": + return "metrics", errors.NewInvalidInputf(errors.CodeInvalidInput, "metrics export not yet supported") + case "traces": + return "traces", errors.NewInvalidInputf(errors.CodeInvalidInput, "traces export not yet supported") + default: + return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid source: must be logs, metrics or traces") + } +} + +func getExportQueryFormat(queryParams url.Values) (string, error) { + switch queryParams.Get("format") { + case "csv", "": + return "csv", nil + case "jsonl": + return "jsonl", nil + default: + return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid format: must be csv or jsonl") + } +} + +func getExportQueryLimit(queryParams url.Values) (int, error) { + + limitStr := queryParams.Get("limit") + if limitStr == "" { + return DefaultExportRowCountLimit, nil + } else { + limit, err := strconv.Atoi(limitStr) + if err != nil { + return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid limit format: %s", err.Error()) + } + if limit <= 0 { + return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be positive") + } + if limit > MaxExportRowCountLimit { + return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "limit cannot be more than %d", MaxExportRowCountLimit) + } + return limit, nil + } +} + +func getExportQueryTimeRange(queryParams url.Values) (uint64, uint64, error) { + + startTimeStr := queryParams.Get("start") + endTimeStr := queryParams.Get("end") + + if startTimeStr == "" || endTimeStr == "" { + return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "start and end time are required") + } + startTime, err := strconv.ParseUint(startTimeStr, 10, 64) + if err != nil { + return 0, 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid start time format: %s", err.Error()) + } + endTime, err := strconv.ParseUint(endTimeStr, 10, 64) + if err != nil { + return 0, 0, 
errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid end time format: %s", err.Error()) + } + return startTime, endTime, nil +} + +func constructCSVHeaderFromQueryResponse(data map[string]any) []string { + header := make([]string, 0, len(data)) + for key := range data { + header = append(header, key) + } + return header +} + +// sanitizeForCSV sanitizes a string for CSV by prefixing a single quote if the first non-whitespace rune is '=', '+', '-', or '@'. +// Excel and sheets remove these leading single quote when displaying the cell content. +// TODO: will revisit this in a future PR +func sanitizeForCSV(s string) string { + // Find first non-whitespace rune + i := 0 + for i < len(s) { + r, size := utf8.DecodeRuneInString(s[i:]) + if !unicode.IsSpace(r) { + // If first non-space is risky, prefix a single quote + switch r { + case '=', '+', '-', '@': + return "'" + s + } + return s + } + i += size + } + return s // all whitespace +} + +func constructCSVRecordFromQueryResponse(data map[string]any, headerToIndexMapping map[string]int) []string { + record := make([]string, len(headerToIndexMapping)) + + for key, value := range data { + if index, exists := headerToIndexMapping[key]; exists && value != nil { + + var valueStr string + switch v := value.(type) { + case string: + valueStr = v + case int: + valueStr = strconv.FormatInt(int64(v), 10) + case int32: + valueStr = strconv.FormatInt(int64(v), 10) + case int64: + valueStr = strconv.FormatInt(v, 10) + case uint: + valueStr = strconv.FormatUint(uint64(v), 10) + case uint32: + valueStr = strconv.FormatUint(uint64(v), 10) + case uint64: + valueStr = strconv.FormatUint(v, 10) + case float32: + valueStr = strconv.FormatFloat(float64(v), 'f', -1, 32) + case float64: + valueStr = strconv.FormatFloat(v, 'f', -1, 64) + case bool: + valueStr = strconv.FormatBool(v) + case time.Time: + valueStr = v.Format(time.RFC3339Nano) + case []byte: + valueStr = string(v) + case fmt.Stringer: + valueStr = v.String() + + default: + 
// For all other complex types (maps, structs, etc.) + jsonBytes, _ := json.Marshal(v) + valueStr = string(jsonBytes) + } + + record[index] = sanitizeForCSV(valueStr) + } + } + return record +} + +// getExportQueryColumns parses the "columns" query parameters and returns a slice of TelemetryFieldKey structs. +// Each column should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field" +func getExportQueryColumns(queryParams url.Values) []telemetrytypes.TelemetryFieldKey { + columnParams := queryParams["columns"] + + columns := make([]telemetrytypes.TelemetryFieldKey, 0, len(columnParams)) + + for _, columnStr := range columnParams { + // Skip empty strings + columnStr = strings.TrimSpace(columnStr) + if columnStr == "" { + continue + } + + columns = append(columns, telemetrytypes.GetFieldKeyFromKeyText(columnStr)) + } + + return columns +} + +func getsizeOfStringSlice(slice []string) uint64 { + var totalBytes uint64 + for _, str := range slice { + totalBytes += uint64(len(str)) + } + return totalBytes +} + +// getExportQueryOrderBy parses the "order_by" query parameters and returns a slice of OrderBy structs. 
+// Each "order_by" parameter should be in the format "column:direction" +// Each "column" should be a valid telemetry field key in the format "context.field:type" or "context.field" or "field" +func getExportQueryOrderBy(queryParams url.Values) ([]qbtypes.OrderBy, error) { + orderByParam := queryParams.Get("order_by") + + orderByParam = strings.TrimSpace(orderByParam) + if orderByParam == "" { + return telemetrylogs.DefaultLogsV2SortingOrder, nil + } + + parts := strings.Split(orderByParam, ":") + if len(parts) != 2 && len(parts) != 3 { + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by format: %s, should be :", orderByParam) + } + + column := strings.Join(parts[:len(parts)-1], ":") + direction := parts[len(parts)-1] + + orderDirection, ok := qbtypes.OrderDirectionMap[direction] + if !ok { + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order_by direction: %s, should be one of %s, %s", direction, qbtypes.OrderDirectionAsc, qbtypes.OrderDirectionDesc) + } + + orderByKey := telemetrytypes.GetFieldKeyFromKeyText(column) + + orderBy := []qbtypes.OrderBy{ + { + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: orderByKey, + }, + Direction: orderDirection, + }, + } + + // If we are ordering by the timestamp column, also order by the ID column + if orderByKey.Name == telemetrylogs.LogsV2TimestampColumn { + orderBy = append(orderBy, qbtypes.OrderBy{ + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2IDColumn, + }, + }, + Direction: orderDirection, + }) + } + return orderBy, nil +} diff --git a/pkg/modules/rawdataexport/implrawdataexport/handler_test.go b/pkg/modules/rawdataexport/implrawdataexport/handler_test.go new file mode 100644 index 000000000000..acd39de4f1e7 --- /dev/null +++ b/pkg/modules/rawdataexport/implrawdataexport/handler_test.go @@ -0,0 +1,563 @@ +package implrawdataexport + +import ( + "net/url" + "strconv" + "testing" + + 
"github.com/SigNoz/signoz/pkg/telemetrylogs" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" + "github.com/stretchr/testify/assert" +) + +func TestGetExportQuerySource(t *testing.T) { + tests := []struct { + name string + queryParams url.Values + expectedSource string + expectedError bool + }{ + { + name: "default logs source", + queryParams: url.Values{}, + expectedSource: "logs", + expectedError: false, + }, + { + name: "explicit logs source", + queryParams: url.Values{"source": {"logs"}}, + expectedSource: "logs", + expectedError: false, + }, + { + name: "metrics source - not supported", + queryParams: url.Values{"source": {"metrics"}}, + expectedSource: "metrics", + expectedError: true, + }, + { + name: "traces source - not supported", + queryParams: url.Values{"source": {"traces"}}, + expectedSource: "traces", + expectedError: true, + }, + { + name: "invalid source", + queryParams: url.Values{"source": {"invalid"}}, + expectedSource: "", + expectedError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + source, err := getExportQuerySource(tt.queryParams) + assert.Equal(t, tt.expectedSource, source) + if tt.expectedError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestGetExportQueryFormat(t *testing.T) { + tests := []struct { + name string + queryParams url.Values + expectedFormat string + expectedError bool + }{ + { + name: "default csv format", + queryParams: url.Values{}, + expectedFormat: "csv", + expectedError: false, + }, + { + name: "explicit csv format", + queryParams: url.Values{"format": {"csv"}}, + expectedFormat: "csv", + expectedError: false, + }, + { + name: "jsonl format", + queryParams: url.Values{"format": {"jsonl"}}, + expectedFormat: "jsonl", + expectedError: false, + }, + { + name: "invalid format", + queryParams: url.Values{"format": {"xml"}}, + expectedFormat: "", + 
expectedError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + format, err := getExportQueryFormat(tt.queryParams) + assert.Equal(t, tt.expectedFormat, format) + if tt.expectedError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestGetExportQueryLimit(t *testing.T) { + tests := []struct { + name string + queryParams url.Values + expectedLimit int + expectedError bool + }{ + { + name: "default limit", + queryParams: url.Values{}, + expectedLimit: DefaultExportRowCountLimit, + expectedError: false, + }, + { + name: "valid limit", + queryParams: url.Values{"limit": {"5000"}}, + expectedLimit: 5000, + expectedError: false, + }, + { + name: "maximum limit", + queryParams: url.Values{"limit": {strconv.Itoa(MaxExportRowCountLimit)}}, + expectedLimit: MaxExportRowCountLimit, + expectedError: false, + }, + { + name: "limit exceeds maximum", + queryParams: url.Values{"limit": {"100000"}}, + expectedLimit: 0, + expectedError: true, + }, + { + name: "invalid limit format", + queryParams: url.Values{"limit": {"invalid"}}, + expectedLimit: 0, + expectedError: true, + }, + { + name: "negative limit", + queryParams: url.Values{"limit": {"-100"}}, + expectedLimit: 0, + expectedError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + limit, err := getExportQueryLimit(tt.queryParams) + assert.Equal(t, tt.expectedLimit, limit) + if tt.expectedError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + }) + } +} + +func TestGetExportQueryTimeRange(t *testing.T) { + tests := []struct { + name string + queryParams url.Values + expectedStartTime uint64 + expectedEndTime uint64 + expectedError bool + }{ + { + name: "valid time range", + queryParams: url.Values{ + "start": {"1640995200"}, + "end": {"1641081600"}, + }, + expectedStartTime: 1640995200, + expectedEndTime: 1641081600, + expectedError: false, + }, + { + name: "missing start time", + queryParams: 
url.Values{"end": {"1641081600"}}, + expectedError: true, + }, + { + name: "missing end time", + queryParams: url.Values{"start": {"1640995200"}}, + expectedError: true, + }, + { + name: "missing both times", + queryParams: url.Values{}, + expectedError: true, + }, + { + name: "invalid start time format", + queryParams: url.Values{ + "start": {"invalid"}, + "end": {"1641081600"}, + }, + expectedError: true, + }, + { + name: "invalid end time format", + queryParams: url.Values{ + "start": {"1640995200"}, + "end": {"invalid"}, + }, + expectedError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + startTime, endTime, err := getExportQueryTimeRange(tt.queryParams) + if tt.expectedError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tt.expectedStartTime, startTime) + assert.Equal(t, tt.expectedEndTime, endTime) + } + }) + } +} + +func TestGetExportQueryColumns(t *testing.T) { + tests := []struct { + name string + queryParams url.Values + expectedColumns []telemetrytypes.TelemetryFieldKey + }{ + { + name: "no columns specified", + queryParams: url.Values{}, + expectedColumns: []telemetrytypes.TelemetryFieldKey{}, + }, + { + name: "single column", + queryParams: url.Values{ + "columns": {"timestamp"}, + }, + expectedColumns: []telemetrytypes.TelemetryFieldKey{ + {Name: "timestamp"}, + }, + }, + { + name: "multiple columns", + queryParams: url.Values{ + "columns": {"timestamp", "message", "level"}, + }, + expectedColumns: []telemetrytypes.TelemetryFieldKey{ + {Name: "timestamp"}, + {Name: "message"}, + {Name: "level"}, + }, + }, + { + name: "empty column name (should be skipped)", + queryParams: url.Values{ + "columns": {"timestamp", "", "level"}, + }, + expectedColumns: []telemetrytypes.TelemetryFieldKey{ + {Name: "timestamp"}, + {Name: "level"}, + }, + }, + { + name: "whitespace column name (should be skipped)", + queryParams: url.Values{ + "columns": {"timestamp", " ", "level"}, + }, + expectedColumns: 
[]telemetrytypes.TelemetryFieldKey{ + {Name: "timestamp"}, + {Name: "level"}, + }, + }, + { + name: "valid column name with data type", + queryParams: url.Values{ + "columns": {"timestamp", "attribute.user:string", "level"}, + }, + expectedColumns: []telemetrytypes.TelemetryFieldKey{ + {Name: "timestamp"}, + {Name: "user", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString}, + {Name: "level"}, + }, + }, + { + name: "valid column name with dot notation", + queryParams: url.Values{ + "columns": {"timestamp", "attribute.user.string", "level"}, + }, + expectedColumns: []telemetrytypes.TelemetryFieldKey{ + {Name: "timestamp"}, + {Name: "user.string", FieldContext: telemetrytypes.FieldContextAttribute}, + {Name: "level"}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + columns := getExportQueryColumns(tt.queryParams) + assert.Equal(t, len(tt.expectedColumns), len(columns)) + for i, expectedCol := range tt.expectedColumns { + assert.Equal(t, expectedCol, columns[i]) + } + }) + } +} + +func TestGetExportQueryOrderBy(t *testing.T) { + tests := []struct { + name string + queryParams url.Values + expectedOrder []qbtypes.OrderBy + expectedError bool + }{ + { + name: "no order specified", + queryParams: url.Values{}, + expectedOrder: []qbtypes.OrderBy{ + { + Direction: qbtypes.OrderDirectionDesc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2TimestampColumn, + }, + }, + }, + { + Direction: qbtypes.OrderDirectionDesc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2IDColumn, + }, + }, + }, + }, + expectedError: false, + }, + { + name: "single order error, direction not specified", + queryParams: url.Values{ + "order_by": {"timestamp"}, + }, + expectedOrder: nil, + expectedError: true, + }, + { + name: "single order no error", + queryParams: url.Values{ + "order_by": 
{"timestamp:asc"}, + }, + expectedOrder: []qbtypes.OrderBy{ + { + Direction: qbtypes.OrderDirectionAsc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2TimestampColumn, + }, + }, + }, + { + Direction: qbtypes.OrderDirectionAsc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2IDColumn, + }, + }, + }, + }, + expectedError: false, + }, + { + name: "multiple orders", + queryParams: url.Values{ + "order_by": {"timestamp:asc", "body:desc", "id:asc"}, + }, + expectedOrder: []qbtypes.OrderBy{ + { + Direction: qbtypes.OrderDirectionAsc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2TimestampColumn, + }, + }, + }, + { + Direction: qbtypes.OrderDirectionAsc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2IDColumn, + }, + }, + }, + }, + expectedError: false, + }, + { + name: "empty order name (should be skipped)", + queryParams: url.Values{ + "order_by": {"timestamp:asc", "", "id:asc"}, + }, + expectedOrder: []qbtypes.OrderBy{ + { + Direction: qbtypes.OrderDirectionAsc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2TimestampColumn, + }, + }, + }, + { + Direction: qbtypes.OrderDirectionAsc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2IDColumn, + }, + }, + }, + }, + expectedError: false, + }, + { + name: "whitespace order name (should be skipped)", + queryParams: url.Values{ + "order_by": {"timestamp:asc", " ", "id:asc"}, + }, + expectedOrder: []qbtypes.OrderBy{ + { + Direction: qbtypes.OrderDirectionAsc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2TimestampColumn, + }, + }, + }, + { + Direction: qbtypes.OrderDirectionAsc, + Key: 
qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: telemetrylogs.LogsV2IDColumn, + }, + }, + }, + }, + expectedError: false, + }, + { + name: "invalid order name (should error out)", + queryParams: url.Values{ + "order_by": {"attributes.user:", "id:asc"}, + }, + expectedOrder: nil, + expectedError: true, + }, + { + name: "valid order name (should be included)", + queryParams: url.Values{ + "order_by": {"attribute.user:string:desc", "id:asc"}, + }, + expectedOrder: []qbtypes.OrderBy{ + { + Direction: qbtypes.OrderDirectionDesc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "user", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + }, + }, + expectedError: false, + }, + { + name: "valid order name (should be included)", + queryParams: url.Values{ + "order_by": {"attribute.user.string:desc", "id:asc"}, + }, + expectedOrder: []qbtypes.OrderBy{ + { + Direction: qbtypes.OrderDirectionDesc, + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: "user.string", + FieldContext: telemetrytypes.FieldContextAttribute, + }, + }, + }, + }, + expectedError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + order, err := getExportQueryOrderBy(tt.queryParams) + if tt.expectedError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, len(tt.expectedOrder), len(order)) + for i, expectedOrd := range tt.expectedOrder { + assert.Equal(t, expectedOrd, order[i]) + } + } + }) + } +} + +func TestConstructCSVHeaderFromQueryResponse(t *testing.T) { + data := map[string]any{ + "timestamp": 1640995200, + "message": "test message", + "level": "INFO", + "id": "test-id", + } + + header := constructCSVHeaderFromQueryResponse(data) + + // Since map iteration order is not guaranteed, check that all expected keys are present + expectedKeys := []string{"timestamp", 
"message", "level", "id"} + assert.Equal(t, len(expectedKeys), len(header)) + + for _, key := range expectedKeys { + assert.Contains(t, header, key) + } +} + +func TestConstructCSVRecordFromQueryResponse(t *testing.T) { + data := map[string]any{ + "timestamp": 1640995200, + "message": "test message", + "level": "INFO", + "id": "test-id", + } + + headerToIndexMapping := map[string]int{ + "timestamp": 0, + "message": 1, + "level": 2, + "id": 3, + } + + record := constructCSVRecordFromQueryResponse(data, headerToIndexMapping) + + assert.Equal(t, 4, len(record)) + assert.Equal(t, "1640995200", record[0]) + assert.Equal(t, "test message", record[1]) + assert.Equal(t, "INFO", record[2]) + assert.Equal(t, "test-id", record[3]) +} diff --git a/pkg/modules/rawdataexport/implrawdataexport/module.go b/pkg/modules/rawdataexport/implrawdataexport/module.go new file mode 100644 index 000000000000..c9a5e0ced7a6 --- /dev/null +++ b/pkg/modules/rawdataexport/implrawdataexport/module.go @@ -0,0 +1,89 @@ +package implrawdataexport + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/modules/rawdataexport" + "github.com/SigNoz/signoz/pkg/querier" + "github.com/SigNoz/signoz/pkg/types/ctxtypes" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type Module struct { + querier querier.Querier +} + +func NewModule(querier querier.Querier) rawdataexport.Module { + return &Module{ + querier: querier, + } +} + +func (m *Module) ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequest *qbtypes.QueryRangeRequest, doneChan chan any) (chan *qbtypes.RawRow, chan error) { + + spec := rangeRequest.CompositeQuery.Queries[0].Spec.(qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) + rowCountLimit := spec.Limit + + rowChan := make(chan *qbtypes.RawRow, 1) + errChan := make(chan error, 1) + + go func() { + // Set clickhouse max threads + ctx := 
ctxtypes.SetClickhouseMaxThreads(ctx, ClickhouseExportRawDataMaxThreads) + // Set clickhouse timeout + contextWithTimeout, cancel := context.WithTimeout(ctx, ClickhouseExportRawDataTimeout) + defer cancel() + defer close(errChan) + defer close(rowChan) + + rowCount := 0 + + for rowCount < rowCountLimit { + spec.Limit = min(ChunkSize, rowCountLimit-rowCount) + spec.Offset = rowCount + + rangeRequest.CompositeQuery.Queries[0].Spec = spec + + response, err := m.querier.QueryRange(contextWithTimeout, orgID, rangeRequest) + if err != nil { + errChan <- err + return + } + + newRowsCount := 0 + for _, result := range response.Data.Results { + resultData, ok := result.(*qbtypes.RawData) + if !ok { + errChan <- errors.NewInternalf(errors.CodeInternal, "expected RawData, got %T", result) + return + } + + newRowsCount += len(resultData.Rows) + for _, row := range resultData.Rows { + select { + case rowChan <- row: + case <-doneChan: + return + case <-ctx.Done(): + errChan <- ctx.Err() + return + } + } + + } + + // Break if we did not receive any new rows + if newRowsCount == 0 { + return + } + + rowCount += newRowsCount + + } + }() + + return rowChan, errChan + +} diff --git a/pkg/modules/rawdataexport/rawdataexport.go b/pkg/modules/rawdataexport/rawdataexport.go new file mode 100644 index 000000000000..9d8b79cea457 --- /dev/null +++ b/pkg/modules/rawdataexport/rawdataexport.go @@ -0,0 +1,17 @@ +package rawdataexport + +import ( + "context" + "net/http" + + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type Module interface { + ExportRawData(ctx context.Context, orgID valuer.UUID, rangeRequest *qbtypes.QueryRangeRequest, doneChan chan any) (chan *qbtypes.RawRow, chan error) +} + +type Handler interface { + ExportRawData(http.ResponseWriter, *http.Request) +} diff --git a/pkg/query-service/app/cloudintegrations/controller_test.go b/pkg/query-service/app/cloudintegrations/controller_test.go index 
a84c17a45684..2bbcd200d0b5 100644 --- a/pkg/query-service/app/cloudintegrations/controller_test.go +++ b/pkg/query-service/app/cloudintegrations/controller_test.go @@ -40,7 +40,7 @@ func TestRegenerateConnectionUrlWithUpdatedConfig(t *testing.T) { jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) emailing := emailingtest.New() analytics := analyticstest.New() - modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics) + modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) user, apiErr := createTestUser(modules.OrgSetter, modules.User) require.Nil(apiErr) @@ -97,7 +97,7 @@ func TestAgentCheckIns(t *testing.T) { jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) emailing := emailingtest.New() analytics := analyticstest.New() - modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics) + modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) user, apiErr := createTestUser(modules.OrgSetter, modules.User) require.Nil(apiErr) @@ -193,7 +193,7 @@ func TestCantDisconnectNonExistentAccount(t *testing.T) { jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) emailing := emailingtest.New() analytics := analyticstest.New() - modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics) + modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) user, apiErr := createTestUser(modules.OrgSetter, modules.User) require.Nil(apiErr) @@ -221,7 +221,7 @@ func TestConfigureService(t *testing.T) { jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) emailing := emailingtest.New() analytics := analyticstest.New() - modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics) + modules := signoz.NewModules(sqlStore, jwt, 
emailing, providerSettings, orgGetter, alertmanager, analytics, nil) user, apiErr := createTestUser(modules.OrgSetter, modules.User) require.Nil(apiErr) diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 5e7442a1d49d..67e9a9576a80 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -617,6 +617,9 @@ func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { router.HandleFunc("/api/v3/licenses/active", am.ViewAccess(func(rw http.ResponseWriter, req *http.Request) { aH.LicensingAPI.Activate(rw, req) })).Methods(http.MethodGet) + + // Export + router.HandleFunc("/api/v1/export_raw_data", am.ViewAccess(aH.Signoz.Handlers.RawDataExport.ExportRawData)).Methods(http.MethodGet) } func (ah *APIHandler) MetricExplorerRoutes(router *mux.Router, am *middleware.AuthZ) { diff --git a/pkg/query-service/app/integrations/manager_test.go b/pkg/query-service/app/integrations/manager_test.go index 43d18ba9b233..aec4b0ac38c9 100644 --- a/pkg/query-service/app/integrations/manager_test.go +++ b/pkg/query-service/app/integrations/manager_test.go @@ -33,7 +33,7 @@ func TestIntegrationLifecycle(t *testing.T) { jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) emailing := emailingtest.New() analytics := analyticstest.New() - modules := signoz.NewModules(store, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics) + modules := signoz.NewModules(store, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) user, apiErr := createTestUser(modules.OrgSetter, modules.User) if apiErr != nil { t.Fatalf("could not create test user: %v", apiErr) diff --git a/pkg/query-service/tests/integration/filter_suggestions_test.go b/pkg/query-service/tests/integration/filter_suggestions_test.go index a57041ae2c6e..77b3e1b87c3d 100644 --- a/pkg/query-service/tests/integration/filter_suggestions_test.go +++ 
b/pkg/query-service/tests/integration/filter_suggestions_test.go @@ -317,7 +317,7 @@ func NewFilterSuggestionsTestBed(t *testing.T) *FilterSuggestionsTestBed { jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) emailing := emailingtest.New() analytics := analyticstest.New() - modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics) + modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) handlers := signoz.NewHandlers(modules, providerSettings) apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ diff --git a/pkg/query-service/tests/integration/logparsingpipeline_test.go b/pkg/query-service/tests/integration/logparsingpipeline_test.go index ea66788b46de..6be197bacdad 100644 --- a/pkg/query-service/tests/integration/logparsingpipeline_test.go +++ b/pkg/query-service/tests/integration/logparsingpipeline_test.go @@ -497,7 +497,7 @@ func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipeli jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) emailing := emailingtest.New() analytics := analyticstest.New() - modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics) + modules := signoz.NewModules(sqlStore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) handlers := signoz.NewHandlers(modules, providerSettings) apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ diff --git a/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go b/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go index d713ba7387d5..35faf33d1628 100644 --- a/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go +++ b/pkg/query-service/tests/integration/signoz_cloud_integrations_test.go @@ -378,7 +378,7 @@ func NewCloudIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *CloudI jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) 
emailing := emailingtest.New() analytics := analyticstest.New() - modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics) + modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) handlers := signoz.NewHandlers(modules, providerSettings) apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ diff --git a/pkg/query-service/tests/integration/signoz_integrations_test.go b/pkg/query-service/tests/integration/signoz_integrations_test.go index 3abd428926d4..c8177b3bc703 100644 --- a/pkg/query-service/tests/integration/signoz_integrations_test.go +++ b/pkg/query-service/tests/integration/signoz_integrations_test.go @@ -593,7 +593,7 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) emailing := emailingtest.New() analytics := analyticstest.New() - modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics) + modules := signoz.NewModules(testDB, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, nil) handlers := signoz.NewHandlers(modules, providerSettings) apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{ diff --git a/pkg/signoz/handler.go b/pkg/signoz/handler.go index f1d1879fda11..546977f31b03 100644 --- a/pkg/signoz/handler.go +++ b/pkg/signoz/handler.go @@ -12,6 +12,8 @@ import ( "github.com/SigNoz/signoz/pkg/modules/preference/implpreference" "github.com/SigNoz/signoz/pkg/modules/quickfilter" "github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter" + "github.com/SigNoz/signoz/pkg/modules/rawdataexport" + "github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport" "github.com/SigNoz/signoz/pkg/modules/savedview" "github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview" "github.com/SigNoz/signoz/pkg/modules/tracefunnel" @@ -21,25 +23,27 @@ import ( ) type Handlers struct { - 
Organization organization.Handler - Preference preference.Handler - User user.Handler - SavedView savedview.Handler - Apdex apdex.Handler - Dashboard dashboard.Handler - QuickFilter quickfilter.Handler - TraceFunnel tracefunnel.Handler + Organization organization.Handler + Preference preference.Handler + User user.Handler + SavedView savedview.Handler + Apdex apdex.Handler + Dashboard dashboard.Handler + QuickFilter quickfilter.Handler + TraceFunnel tracefunnel.Handler + RawDataExport rawdataexport.Handler } func NewHandlers(modules Modules, providerSettings factory.ProviderSettings) Handlers { return Handlers{ - Organization: implorganization.NewHandler(modules.OrgGetter, modules.OrgSetter), - Preference: implpreference.NewHandler(modules.Preference), - User: impluser.NewHandler(modules.User), - SavedView: implsavedview.NewHandler(modules.SavedView), - Apdex: implapdex.NewHandler(modules.Apdex), - Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings), - QuickFilter: implquickfilter.NewHandler(modules.QuickFilter), - TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel), + Organization: implorganization.NewHandler(modules.OrgGetter, modules.OrgSetter), + Preference: implpreference.NewHandler(modules.Preference), + User: impluser.NewHandler(modules.User), + SavedView: implsavedview.NewHandler(modules.SavedView), + Apdex: implapdex.NewHandler(modules.Apdex), + Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings), + QuickFilter: implquickfilter.NewHandler(modules.QuickFilter), + TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel), + RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport), } } diff --git a/pkg/signoz/handler_test.go b/pkg/signoz/handler_test.go index 4fbde586f29d..204093c62c21 100644 --- a/pkg/signoz/handler_test.go +++ b/pkg/signoz/handler_test.go @@ -33,7 +33,7 @@ func TestNewHandlers(t *testing.T) { require.NoError(t, err) jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) emailing 
:= emailingtest.New() - modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, nil) + modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, nil, nil) handlers := NewHandlers(modules, providerSettings) diff --git a/pkg/signoz/module.go b/pkg/signoz/module.go index 0a1fa6487099..e5daf80ac14d 100644 --- a/pkg/signoz/module.go +++ b/pkg/signoz/module.go @@ -15,27 +15,31 @@ import ( "github.com/SigNoz/signoz/pkg/modules/preference/implpreference" "github.com/SigNoz/signoz/pkg/modules/quickfilter" "github.com/SigNoz/signoz/pkg/modules/quickfilter/implquickfilter" + "github.com/SigNoz/signoz/pkg/modules/rawdataexport" + "github.com/SigNoz/signoz/pkg/modules/rawdataexport/implrawdataexport" "github.com/SigNoz/signoz/pkg/modules/savedview" "github.com/SigNoz/signoz/pkg/modules/savedview/implsavedview" "github.com/SigNoz/signoz/pkg/modules/tracefunnel" "github.com/SigNoz/signoz/pkg/modules/tracefunnel/impltracefunnel" "github.com/SigNoz/signoz/pkg/modules/user" "github.com/SigNoz/signoz/pkg/modules/user/impluser" + "github.com/SigNoz/signoz/pkg/querier" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/preferencetypes" ) type Modules struct { - OrgGetter organization.Getter - OrgSetter organization.Setter - Preference preference.Module - User user.Module - SavedView savedview.Module - Apdex apdex.Module - Dashboard dashboard.Module - QuickFilter quickfilter.Module - TraceFunnel tracefunnel.Module + OrgGetter organization.Getter + OrgSetter organization.Setter + Preference preference.Module + User user.Module + SavedView savedview.Module + Apdex apdex.Module + Dashboard dashboard.Module + QuickFilter quickfilter.Module + TraceFunnel tracefunnel.Module + RawDataExport rawdataexport.Module } func NewModules( @@ -46,19 +50,21 @@ func NewModules( orgGetter organization.Getter, alertmanager alertmanager.Alertmanager, analytics 
analytics.Analytics, + querier querier.Querier, ) Modules { quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore)) orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter) user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), jwt, emailing, providerSettings, orgSetter, analytics) return Modules{ - OrgGetter: orgGetter, - OrgSetter: orgSetter, - Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewAvailablePreference()), - SavedView: implsavedview.NewModule(sqlstore), - Apdex: implapdex.NewModule(sqlstore), - Dashboard: impldashboard.NewModule(sqlstore, providerSettings, analytics), - User: user, - QuickFilter: quickfilter, - TraceFunnel: impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)), + OrgGetter: orgGetter, + OrgSetter: orgSetter, + Preference: implpreference.NewModule(implpreference.NewStore(sqlstore), preferencetypes.NewAvailablePreference()), + SavedView: implsavedview.NewModule(sqlstore), + Apdex: implapdex.NewModule(sqlstore), + Dashboard: impldashboard.NewModule(sqlstore, providerSettings, analytics), + User: user, + QuickFilter: quickfilter, + TraceFunnel: impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)), + RawDataExport: implrawdataexport.NewModule(querier), } } diff --git a/pkg/signoz/module_test.go b/pkg/signoz/module_test.go index 2445d1d67f5e..676b4b880c33 100644 --- a/pkg/signoz/module_test.go +++ b/pkg/signoz/module_test.go @@ -33,7 +33,7 @@ func TestNewModules(t *testing.T) { require.NoError(t, err) jwt := authtypes.NewJWT("", 1*time.Hour, 1*time.Hour) emailing := emailingtest.New() - modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, nil) + modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, nil, nil) reflectVal := reflect.ValueOf(modules) for i := 0; i < reflectVal.NumField(); i++ { diff --git a/pkg/signoz/signoz.go 
b/pkg/signoz/signoz.go index 53a7d9105bb7..050526772fad 100644 --- a/pkg/signoz/signoz.go +++ b/pkg/signoz/signoz.go @@ -265,7 +265,7 @@ func New( } // Initialize all modules - modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics) + modules := NewModules(sqlstore, jwt, emailing, providerSettings, orgGetter, alertmanager, analytics, querier) // Initialize all handlers for the modules handlers := NewHandlers(modules, providerSettings) diff --git a/pkg/telemetrylogs/const.go b/pkg/telemetrylogs/const.go index 9b0c38a45978..b9bd29bd4d5a 100644 --- a/pkg/telemetrylogs/const.go +++ b/pkg/telemetrylogs/const.go @@ -1,6 +1,36 @@ package telemetrylogs -import "github.com/SigNoz/signoz/pkg/types/telemetrytypes" +import ( + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" + "github.com/SigNoz/signoz/pkg/types/telemetrytypes" +) + +const ( + + // Internal Columns + LogsV2IDColumn = "id" + LogsV2TimestampBucketStartColumn = "ts_bucket_start" + LogsV2ResourceFingerPrintColumn = "resource_fingerprint" + + // Intrinsic Columns + LogsV2TimestampColumn = "timestamp" + LogsV2ObservedTimestampColumn = "observed_timestamp" + LogsV2BodyColumn = "body" + LogsV2TraceIDColumn = "trace_id" + LogsV2SpanIDColumn = "span_id" + LogsV2TraceFlagsColumn = "trace_flags" + LogsV2SeverityTextColumn = "severity_text" + LogsV2SeverityNumberColumn = "severity_number" + LogsV2ScopeNameColumn = "scope_name" + LogsV2ScopeVersionColumn = "scope_version" + + // Contextual Columns + LogsV2AttributesStringColumn = "attributes_string" + LogsV2AttributesNumberColumn = "attributes_number" + LogsV2AttributesBoolColumn = "attributes_bool" + LogsV2ResourcesStringColumn = "resources_string" + LogsV2ScopeStringColumn = "scope_string" +) var ( DefaultFullTextColumn = &telemetrytypes.TelemetryFieldKey{ @@ -63,4 +93,23 @@ var ( FieldDataType: telemetrytypes.FieldDataTypeString, }, } + + DefaultLogsV2SortingOrder = []qbtypes.OrderBy{ + { + 
Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: LogsV2TimestampColumn, + }, + }, + Direction: qbtypes.OrderDirectionDesc, + }, + { + Key: qbtypes.OrderByKey{ + TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{ + Name: LogsV2IDColumn, + }, + }, + Direction: qbtypes.OrderDirectionDesc, + }, + } ) diff --git a/pkg/telemetrylogs/statement_builder.go b/pkg/telemetrylogs/statement_builder.go index b193d3a28f5c..491e25ee6452 100644 --- a/pkg/telemetrylogs/statement_builder.go +++ b/pkg/telemetrylogs/statement_builder.go @@ -220,10 +220,39 @@ func (b *logQueryStatementBuilder) buildListQuery( cteArgs = append(cteArgs, args) } - // Select default columns - sb.Select( - "timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string", - ) + // Select timestamp and id by default + sb.Select(LogsV2TimestampColumn) + sb.SelectMore(LogsV2IDColumn) + if len(query.SelectFields) == 0 { + // Select all default columns + sb.SelectMore(LogsV2TraceIDColumn) + sb.SelectMore(LogsV2SpanIDColumn) + sb.SelectMore(LogsV2TraceFlagsColumn) + sb.SelectMore(LogsV2SeverityTextColumn) + sb.SelectMore(LogsV2SeverityNumberColumn) + sb.SelectMore(LogsV2ScopeNameColumn) + sb.SelectMore(LogsV2ScopeVersionColumn) + sb.SelectMore(LogsV2BodyColumn) + sb.SelectMore(LogsV2AttributesStringColumn) + sb.SelectMore(LogsV2AttributesNumberColumn) + sb.SelectMore(LogsV2AttributesBoolColumn) + sb.SelectMore(LogsV2ResourcesStringColumn) + sb.SelectMore(LogsV2ScopeStringColumn) + + } else { + // Select specified columns + for index := range query.SelectFields { + if query.SelectFields[index].Name == LogsV2TimestampColumn || query.SelectFields[index].Name == LogsV2IDColumn { + continue + } + // get column expression for the field - use array index directly to avoid pointer to loop variable + colExpr, err := b.fm.ColumnExpressionFor(ctx, 
&query.SelectFields[index], keys) + if err != nil { + return nil, err + } + sb.SelectMore(colExpr) + } + } // From table sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName)) diff --git a/pkg/types/ctxtypes/clickhouse.go b/pkg/types/ctxtypes/clickhouse.go new file mode 100644 index 000000000000..2ceeeba5e923 --- /dev/null +++ b/pkg/types/ctxtypes/clickhouse.go @@ -0,0 +1,14 @@ +package ctxtypes + +import "context" + +type ctxKey string + +const ( + ClickhouseContextMaxThreadsKey ctxKey = "clickhouse_max_threads" +) + +// SetClickhouseMaxThreads stores the max threads value in context. +func SetClickhouseMaxThreads(ctx context.Context, maxThreads int) context.Context { + return context.WithValue(ctx, ClickhouseContextMaxThreadsKey, maxThreads) +} diff --git a/pkg/types/querybuildertypes/querybuildertypesv5/builder_elements.go b/pkg/types/querybuildertypes/querybuildertypesv5/builder_elements.go index a031005c4b9a..a89fed33ef92 100644 --- a/pkg/types/querybuildertypes/querybuildertypesv5/builder_elements.go +++ b/pkg/types/querybuildertypes/querybuildertypesv5/builder_elements.go @@ -145,6 +145,13 @@ var ( OrderDirectionDesc = OrderDirection{valuer.NewString("desc")} ) +var ( + OrderDirectionMap = map[string]OrderDirection{ + "asc": OrderDirectionAsc, + "desc": OrderDirectionDesc, + } +) + type ReduceTo struct { valuer.String } From 0129326a0b06b2865c865d2bf60b31477f8d2c43 Mon Sep 17 00:00:00 2001 From: Abhi kumar Date: Tue, 9 Sep 2025 18:44:33 +0530 Subject: [PATCH 06/51] Fix: changelog modal spacing issue (#9048) * fix: added fix for changelog modal styling issue * chore: minor code cleanup * chore: pr review changes * chore: minor preetier fix --- .../components/ChangelogRenderer.styles.scss | 40 +++++++------ .../components/ChangelogRenderer.tsx | 56 ++++++++++--------- 2 files changed, 52 insertions(+), 44 deletions(-) diff --git a/frontend/src/components/ChangelogModal/components/ChangelogRenderer.styles.scss 
b/frontend/src/components/ChangelogModal/components/ChangelogRenderer.styles.scss index 2f050ab78eb6..bfd29677ca02 100644 --- a/frontend/src/components/ChangelogModal/components/ChangelogRenderer.styles.scss +++ b/frontend/src/components/ChangelogModal/components/ChangelogRenderer.styles.scss @@ -2,10 +2,28 @@ position: relative; padding-left: 20px; + & :is(h1, h2, h3, h4, h5, h6, p, &-section-title) { + margin-bottom: 12px; + } + + &-content { + display: flex; + flex-direction: column; + gap: 32px; + } + + &-section-title { + font-size: 14px; + line-height: 20px; + color: var(--text-vanilla-400, #c0c1c3); + } + .changelog-release-date { font-size: 14px; line-height: 20px; color: var(--text-vanilla-400, #c0c1c3); + display: block; + margin-bottom: 12px; } &-list { @@ -81,12 +99,7 @@ } } - h1, - h2, - h3, - h4, - h5, - h6 { + & :is(h1, h2, h3, h4, h5, h6, p, &-section-title) { font-weight: 600; color: var(--text-vanilla-100, #fff); } @@ -96,7 +109,8 @@ line-height: 32px; } - h2 { + h2, + &-section-title { font-size: 20px; line-height: 28px; } @@ -108,6 +122,7 @@ overflow: hidden; border-radius: 4px; border: 1px solid var(--bg-slate-400, #1d212d); + margin-bottom: 28px; } .changelog-media-video { @@ -124,17 +139,8 @@ &-line { background-color: var(--bg-vanilla-300); } - li, - p { - color: var(--text-ink-500); - } - h1, - h2, - h3, - h4, - h5, - h6 { + & :is(h1, h2, h3, h4, h5, h6, p, li, &-section-title) { color: var(--text-ink-500); } diff --git a/frontend/src/components/ChangelogModal/components/ChangelogRenderer.tsx b/frontend/src/components/ChangelogModal/components/ChangelogRenderer.tsx index fe2a7b953e4e..5113b4d6abc4 100644 --- a/frontend/src/components/ChangelogModal/components/ChangelogRenderer.tsx +++ b/frontend/src/components/ChangelogModal/components/ChangelogRenderer.tsx @@ -55,33 +55,35 @@ function ChangelogRenderer({ changelog }: Props): JSX.Element {
{formattedReleaseDate} - {changelog.features && changelog.features.length > 0 && ( -
- {changelog.features.map((feature) => ( -
-

{feature.title}

- {feature.media && renderMedia(feature.media)} - {feature.description} -
- ))} -
- )} - {changelog.bug_fixes && changelog.bug_fixes.length > 0 && ( -
-

Bug Fixes

- {changelog.bug_fixes && ( - {changelog.bug_fixes} - )} -
- )} - {changelog.maintenance && changelog.maintenance.length > 0 && ( -
-

Maintenance

- {changelog.maintenance && ( - {changelog.maintenance} - )} -
- )} +
+ {changelog.features && changelog.features.length > 0 && ( +
+ {changelog.features.map((feature) => ( +
+
{feature.title}
+ {feature.media && renderMedia(feature.media)} + {feature.description} +
+ ))} +
+ )} + {changelog.bug_fixes && changelog.bug_fixes.length > 0 && ( +
+
Bug Fixes
+ {changelog.bug_fixes && ( + {changelog.bug_fixes} + )} +
+ )} + {changelog.maintenance && changelog.maintenance.length > 0 && ( +
+
Maintenance
+ {changelog.maintenance && ( + {changelog.maintenance} + )} +
+ )} +
); } From 011b769d4dd178528545e93b414cba3dbe1259f9 Mon Sep 17 00:00:00 2001 From: SagarRajput-7 <162284829+SagarRajput-7@users.noreply.github.com> Date: Tue, 9 Sep 2025 20:23:36 +0530 Subject: [PATCH 07/51] chore: added resolution brace-expansion to 2.0.2 to fix the vulnerability (#9049) --- frontend/package.json | 3 ++- frontend/yarn.lock | 21 ++++----------------- 2 files changed, 6 insertions(+), 18 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index b1b5d100226e..61514485ffd1 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -275,6 +275,7 @@ "serialize-javascript": "6.0.2", "prismjs": "1.30.0", "got": "11.8.5", - "form-data": "4.0.4" + "form-data": "4.0.4", + "brace-expansion": "^2.0.2" } } diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 99360b670401..09a6c3e65881 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -6899,18 +6899,10 @@ boolbase@^1.0.0: resolved "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz" integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -brace-expansion@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" - integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== +brace-expansion@2.0.2, brace-expansion@^1.1.7, brace-expansion@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.2.tgz#54fc53237a613d854c7bd37463aad17df87214e7" + integrity 
sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ== dependencies: balanced-match "^1.0.0" @@ -7558,11 +7550,6 @@ compute-scroll-into-view@^3.0.2: resolved "https://registry.npmjs.org/compute-scroll-into-view/-/compute-scroll-into-view-3.0.3.tgz" integrity sha512-nadqwNxghAGTamwIqQSG433W6OADZx2vCo3UXHNrzTRHK/htu+7+L0zhjEoaeaQVNAi3YgqWDv8+tzf0hRfR+A== -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" - integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== - confusing-browser-globals@^1.0.10: version "1.0.11" resolved "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.11.tgz" From f91115948aac9395dc82de6be7cd5f4b337f552d Mon Sep 17 00:00:00 2001 From: Shaheer Kochai Date: Wed, 10 Sep 2025 09:01:57 +0430 Subject: [PATCH 08/51] feat: add support for span hover card in trace details v2 (#8930) * feat: add support for span hover card in trace details v2 * chore: remove the unnecessary tooltip --------- Co-authored-by: Nityananda Gohain --- .../SpanHoverCard/SpanHoverCard.styles.scss | 108 +++++++ .../SpanHoverCard/SpanHoverCard.tsx | 101 ++++++ frontend/src/constants/dateTimeFormats.ts | 1 + .../TraceWaterfallStates/Success/Success.tsx | 302 +++++++++--------- 4 files changed, 364 insertions(+), 148 deletions(-) create mode 100644 frontend/src/components/SpanHoverCard/SpanHoverCard.styles.scss create mode 100644 frontend/src/components/SpanHoverCard/SpanHoverCard.tsx diff --git a/frontend/src/components/SpanHoverCard/SpanHoverCard.styles.scss b/frontend/src/components/SpanHoverCard/SpanHoverCard.styles.scss new file mode 100644 index 000000000000..7c243c8508b6 --- /dev/null +++ b/frontend/src/components/SpanHoverCard/SpanHoverCard.styles.scss @@ -0,0 +1,108 @@ +.span-hover-card { + width: 206px; + + .ant-popover-inner { + background: linear-gradient( + 139deg, + rgba(18, 19, 23, 
0.32) 0%, + rgba(18, 19, 23, 0.36) 98.68% + ); + padding: 12px 16px; + border: 1px solid var(--bg-slate-500); + + &::before { + content: ''; + position: absolute; + top: 0; + left: 0; + right: 0; + bottom: 0; + background: linear-gradient( + 139deg, + rgba(18, 19, 23, 0.32) 0%, + rgba(18, 19, 23, 0.36) 98.68% + ); + backdrop-filter: blur(20px); + border-radius: 4px; + z-index: -1; + will-change: background-color, backdrop-filter; + } + } + + &__title { + display: flex; + flex-direction: column; + gap: 0.25rem; + margin-bottom: 0.5rem; + } + + &__operation { + color: var(--bg-vanilla-100); + font-size: 12px; + font-weight: 500; + line-height: 20px; + letter-spacing: 0.48px; + } + + &__service { + font-size: 0.875rem; + color: var(--bg-vanilla-400); + font-weight: 400; + } + + &__error { + font-size: 0.75rem; + color: var(--bg-cherry-500); + font-weight: 500; + } + + &__row { + display: flex; + justify-content: space-between; + align-items: center; + max-width: 174px; + margin-top: 8px; + } + + &__label { + color: var(--bg-vanilla-400); + font-size: 12px; + font-weight: 500; + line-height: 20px; + } + + &__value { + color: var(--bg-vanilla-100); + font-size: 12px; + font-weight: 500; + line-height: 20px; + text-align: right; + } + + &__relative-time { + display: flex; + align-items: center; + margin-top: 4px; + gap: 8px; + border-radius: 1px 0 0 1px; + background: linear-gradient( + 90deg, + hsla(358, 75%, 59%, 0.2) 0%, + rgba(229, 72, 77, 0) 100% + ); + + &-icon { + width: 2px; + height: 20px; + flex-shrink: 0; + border-radius: 2px; + background: var(--bg-cherry-500); + } + } + + &__relative-text { + color: var(--bg-cherry-300); + font-size: 12px; + line-height: 20px; + } +} diff --git a/frontend/src/components/SpanHoverCard/SpanHoverCard.tsx b/frontend/src/components/SpanHoverCard/SpanHoverCard.tsx new file mode 100644 index 000000000000..2ae0d7647e3c --- /dev/null +++ b/frontend/src/components/SpanHoverCard/SpanHoverCard.tsx @@ -0,0 +1,101 @@ +import 
'./SpanHoverCard.styles.scss'; + +import { Popover, Typography } from 'antd'; +import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats'; +import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils'; +import dayjs from 'dayjs'; +import { ReactNode } from 'react'; +import { Span } from 'types/api/trace/getTraceV2'; +import { toFixed } from 'utils/toFixed'; + +interface ITraceMetadata { + startTime: number; + endTime: number; +} + +interface SpanHoverCardProps { + span: Span; + traceMetadata: ITraceMetadata; + children: ReactNode; +} + +function SpanHoverCard({ + span, + traceMetadata, + children, +}: SpanHoverCardProps): JSX.Element { + const duration = span.durationNano / 1e6; // Convert nanoseconds to milliseconds + const { time: formattedDuration, timeUnitName } = convertTimeToRelevantUnit( + duration, + ); + + // Calculate relative start time from trace start + const relativeStartTime = span.timestamp - traceMetadata.startTime; + const { + time: relativeTime, + timeUnitName: relativeTimeUnit, + } = convertTimeToRelevantUnit(relativeStartTime); + + // Format absolute start time + const startTimeFormatted = dayjs(span.timestamp).format( + DATE_TIME_FORMATS.SPAN_POPOVER_DATE, + ); + + const getContent = (): JSX.Element => ( +
+
+ + Duration: + + + {toFixed(formattedDuration, 2)} + {timeUnitName} + +
+
+ + Events: + + + {span.event?.length || 0} + +
+
+ + Start time: + + + {startTimeFormatted} + +
+
+
+ + {toFixed(relativeTime, 2)} + {relativeTimeUnit} after trace start + +
+
+ ); + + return ( + + + {span.name} + +
+ } + content={getContent()} + trigger="hover" + rootClassName="span-hover-card" + autoAdjustOverflow + arrow={false} + > + {children} + + ); +} + +export default SpanHoverCard; diff --git a/frontend/src/constants/dateTimeFormats.ts b/frontend/src/constants/dateTimeFormats.ts index e9a67701322c..440b640eb743 100644 --- a/frontend/src/constants/dateTimeFormats.ts +++ b/frontend/src/constants/dateTimeFormats.ts @@ -29,6 +29,7 @@ export const DATE_TIME_FORMATS = { DATE_SHORT: 'MM/DD', YEAR_SHORT: 'YY', YEAR_MONTH: 'YY-MM', + SPAN_POPOVER_DATE: 'M/D/YY - HH:mm', // Month name formats MONTH_DATE_FULL: 'MMMM DD, YYYY', diff --git a/frontend/src/container/TraceWaterfall/TraceWaterfallStates/Success/Success.tsx b/frontend/src/container/TraceWaterfall/TraceWaterfallStates/Success/Success.tsx index 2ea43fb8a9aa..b7d48b420860 100644 --- a/frontend/src/container/TraceWaterfall/TraceWaterfallStates/Success/Success.tsx +++ b/frontend/src/container/TraceWaterfall/TraceWaterfallStates/Success/Success.tsx @@ -7,6 +7,7 @@ import { Virtualizer } from '@tanstack/react-virtual'; import { Button, Tooltip, Typography } from 'antd'; import cx from 'classnames'; import HttpStatusBadge from 'components/HttpStatusBadge/HttpStatusBadge'; +import SpanHoverCard from 'components/SpanHoverCard/SpanHoverCard'; import { TableV3 } from 'components/TableV3/TableV3'; import { themeColors } from 'constants/theme'; import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils'; @@ -66,6 +67,7 @@ function SpanOverview({ setSelectedSpan, handleAddSpanToFunnel, selectedSpan, + traceMetadata, }: { span: Span; isSpanCollapsed: boolean; @@ -73,6 +75,7 @@ function SpanOverview({ selectedSpan: Span | undefined; setSelectedSpan: Dispatch>; handleAddSpanToFunnel: (span: Span) => void; + traceMetadata: ITraceMetadata; }): JSX.Element { const isRootSpan = span.level === 0; const { hasEditPermission } = useAppContext(); @@ -83,109 +86,111 @@ function SpanOverview({ } return ( -
')`, - backgroundRepeat: 'repeat', - backgroundSize: `${CONNECTOR_WIDTH + 1}px 54px`, - }} - onClick={(): void => { - setSelectedSpan(span); - }} - > - {!isRootSpan && ( -
-
-
- )} -
-
-
- {span.hasChildren ? ( - - ) : ( - - )} - {span.name} + +
')`, + backgroundRepeat: 'repeat', + backgroundSize: `${CONNECTOR_WIDTH + 1}px 54px`, + }} + onClick={(): void => { + setSelectedSpan(span); + }} + > + {!isRootSpan && ( +
+
- -
-
-
- - {span.serviceName} - - {!!span.serviceName && !!span.name && ( -
- · - + )} +
+
+
+ {span.hasChildren ? ( + ) : ( + + )} + {span.name}
- )} -
+ +
+
+
+ + {span.serviceName} + + {!!span.serviceName && !!span.name && ( +
+ · + +
+ )} +
+
-
+ ); } @@ -249,64 +254,64 @@ export function SpanDuration({ }, [leftOffset, width, color]); return ( -
{ - setSelectedSpan(span); - if (span?.spanId) { - urlQuery.set('spanId', span?.spanId); - } - - safeNavigate({ search: urlQuery.toString() }); - }} - > +
{ + setSelectedSpan(span); + if (span?.spanId) { + urlQuery.set('spanId', span?.spanId); + } + + safeNavigate({ search: urlQuery.toString() }); }} > - {span.event?.map((event) => { - const eventTimeMs = event.timeUnixNano / 1e6; - const eventOffsetPercent = - ((eventTimeMs - span.timestamp) / (span.durationNano / 1e6)) * 100; - const clampedOffset = Math.max(1, Math.min(eventOffsetPercent, 99)); - const { isError } = event; - const { time, timeUnitName } = convertTimeToRelevantUnit( - eventTimeMs - span.timestamp, - ); - return ( - -
- - ); - })} -
- {hasActionButtons && } - +
+ {span.event?.map((event) => { + const eventTimeMs = event.timeUnixNano / 1e6; + const eventOffsetPercent = + ((eventTimeMs - span.timestamp) / (span.durationNano / 1e6)) * 100; + const clampedOffset = Math.max(1, Math.min(eventOffsetPercent, 99)); + const { isError } = event; + const { time, timeUnitName } = convertTimeToRelevantUnit( + eventTimeMs - span.timestamp, + ); + return ( + +
+ + ); + })} +
+ {hasActionButtons && } {`${toFixed(time, 2)} ${timeUnitName}`} -
-
+
+
); } @@ -341,6 +346,7 @@ function getWaterfallColumns({ selectedSpan={selectedSpan} setSelectedSpan={setSelectedSpan} handleAddSpanToFunnel={handleAddSpanToFunnel} + traceMetadata={traceMetadata} /> ), size: 450, From f82e9b55f8258909f9dbb2c94bccf9a14c18b720 Mon Sep 17 00:00:00 2001 From: SagarRajput-7 <162284829+SagarRajput-7@users.noreply.github.com> Date: Wed, 10 Sep 2025 10:51:06 +0530 Subject: [PATCH 09/51] fix: logs explorer chart severity text bugfixes (#8731) * fix: fixed severity color getting incorrectly assigned due to the response changed in v5 API * fix: implement consistent severity variant colors across logs chart and indicator component * chore: fix the failing tests * chore: fix the failing check --------- Co-authored-by: ahmadshaheer Co-authored-by: Nityananda Gohain --- .../src/components/Logs/ListLogView/index.tsx | 6 +- .../LogStateIndicator.styles.scss | 106 ++++++++++++++++-- .../LogStateIndicator.test.tsx | 34 +++--- .../LogStateIndicator/LogStateIndicator.tsx | 102 ++++++++++++++++- .../Logs/LogStateIndicator/utils.ts | 2 +- .../src/components/Logs/RawLogView/index.tsx | 6 +- .../Logs/TableView/useTableView.tsx | 4 +- .../src/container/LogsExplorerChart/utils.ts | 68 +++++++++++ .../ColumnView/ColumnView.tsx | 9 +- 9 files changed, 299 insertions(+), 38 deletions(-) diff --git a/frontend/src/components/Logs/ListLogView/index.tsx b/frontend/src/components/Logs/ListLogView/index.tsx index bdc8b2e77f0c..c77d31ddc81f 100644 --- a/frontend/src/components/Logs/ListLogView/index.tsx +++ b/frontend/src/components/Logs/ListLogView/index.tsx @@ -208,7 +208,11 @@ function ListLogView({ fontSize={fontSize} >
- +
{updatedSelecedFields.some((field) => field.name === 'body') && ( diff --git a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss index 5c2720e954c6..b3e51fe54f42 100644 --- a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss +++ b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.styles.scss @@ -7,7 +7,6 @@ height: 100%; width: 3px; border-radius: 50px; - background-color: transparent; &.small { min-height: 16px; @@ -21,24 +20,107 @@ min-height: 24px; } - &.INFO { - background-color: var(--bg-robin-500); + // Severity variant CSS classes using design tokens + // Trace variants - + &.severity-trace-0 { + background-color: var(--bg-forest-600); } - &.WARNING, - &.WARN { - background-color: var(--bg-amber-500); + &.severity-trace-1 { + background-color: var(--bg-forest-500); } - &.ERROR { - background-color: var(--bg-cherry-500); - } - &.TRACE { + &.severity-trace-2 { background-color: var(--bg-forest-400); } - &.DEBUG { + &.severity-trace-3 { + background-color: var(--bg-forest-300); + } + &.severity-trace-4 { + background-color: var(--bg-forest-200); + } + + // Debug variants + &.severity-debug-0 { + background-color: var(--bg-aqua-600); + } + &.severity-debug-1 { background-color: var(--bg-aqua-500); } - &.FATAL { + &.severity-debug-2 { + background-color: var(--bg-aqua-400); + } + &.severity-debug-3 { + background-color: var(--bg-aqua-300); + } + &.severity-debug-4 { + background-color: var(--bg-aqua-200); + } + + // Info variants + &.severity-info-0 { + background-color: var(--bg-robin-600); + } + &.severity-info-1 { + background-color: var(--bg-robin-500); + } + &.severity-info-2 { + background-color: var(--bg-robin-400); + } + &.severity-info-3 { + background-color: var(--bg-robin-300); + } + &.severity-info-4 { + background-color: var(--bg-robin-200); + } + + // Warn variants + &.severity-warn-0 { + 
background-color: var(--bg-amber-600); + } + &.severity-warn-1 { + background-color: var(--bg-amber-500); + } + &.severity-warn-2 { + background-color: var(--bg-amber-400); + } + &.severity-warn-3 { + background-color: var(--bg-amber-300); + } + &.severity-warn-4 { + background-color: var(--bg-amber-200); + } + + // Error variants + &.severity-error-0 { + background-color: var(--bg-cherry-600); + } + &.severity-error-1 { + background-color: var(--bg-cherry-500); + } + &.severity-error-2 { + background-color: var(--bg-cherry-400); + } + &.severity-error-3 { + background-color: var(--bg-cherry-300); + } + &.severity-error-4 { + background-color: var(--bg-cherry-200); + } + + // Fatal variants + &.severity-fatal-0 { + background-color: var(--bg-sakura-600); + } + &.severity-fatal-1 { background-color: var(--bg-sakura-500); } + &.severity-fatal-2 { + background-color: var(--bg-sakura-400); + } + &.severity-fatal-3 { + background-color: var(--bg-sakura-300); + } + &.severity-fatal-4 { + background-color: var(--bg-sakura-200); + } } } diff --git a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.test.tsx b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.test.tsx index 5ecddd5959c5..086710d74bbe 100644 --- a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.test.tsx +++ b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.test.tsx @@ -6,37 +6,41 @@ import LogStateIndicator from './LogStateIndicator'; describe('LogStateIndicator', () => { it('renders correctly with default props', () => { const { container } = render( - , + , ); const indicator = container.firstChild as HTMLElement; expect(indicator.classList.contains('log-state-indicator')).toBe(true); expect(indicator.classList.contains('isActive')).toBe(false); expect(container.querySelector('.line')).toBeTruthy(); - expect(container.querySelector('.line')?.classList.contains('INFO')).toBe( - true, - ); + expect( + 
container.querySelector('.line')?.classList.contains('severity-info-0'), + ).toBe(true); }); it('renders correctly with different types', () => { const { container: containerInfo } = render( - , - ); - expect(containerInfo.querySelector('.line')?.classList.contains('INFO')).toBe( - true, - ); - - const { container: containerWarning } = render( - , + , ); expect( - containerWarning.querySelector('.line')?.classList.contains('WARNING'), + containerInfo.querySelector('.line')?.classList.contains('severity-info-0'), + ).toBe(true); + + const { container: containerWarning } = render( + , + ); + expect( + containerWarning + .querySelector('.line') + ?.classList.contains('severity-warn-0'), ).toBe(true); const { container: containerError } = render( - , + , ); expect( - containerError.querySelector('.line')?.classList.contains('ERROR'), + containerError + .querySelector('.line') + ?.classList.contains('severity-error-0'), ).toBe(true); }); }); diff --git a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx index f831c6252a88..7f2eeb4ecaf9 100644 --- a/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx +++ b/frontend/src/components/Logs/LogStateIndicator/LogStateIndicator.tsx @@ -3,6 +3,8 @@ import './LogStateIndicator.styles.scss'; import cx from 'classnames'; import { FontSize } from 'container/OptionsMenu/types'; +import { getLogTypeBySeverityNumber } from './utils'; + export const SEVERITY_TEXT_TYPE = { TRACE: 'TRACE', TRACE2: 'TRACE2', @@ -42,18 +44,112 @@ export const LogType = { UNKNOWN: 'UNKNOWN', } as const; +// Severity variant mapping to CSS classes +const SEVERITY_VARIANT_CLASSES: Record = { + // Trace variants - forest-600 to forest-200 + TRACE: 'severity-trace-0', + Trace: 'severity-trace-1', + trace: 'severity-trace-2', + trc: 'severity-trace-3', + Trc: 'severity-trace-4', + + // Debug variants - aqua-600 to aqua-200 + DEBUG: 'severity-debug-0', + 
Debug: 'severity-debug-1', + debug: 'severity-debug-2', + dbg: 'severity-debug-3', + Dbg: 'severity-debug-4', + + // Info variants - robin-600 to robin-200 + INFO: 'severity-info-0', + Info: 'severity-info-1', + info: 'severity-info-2', + Information: 'severity-info-3', + information: 'severity-info-4', + + // Warn variants - amber-600 to amber-200 + WARN: 'severity-warn-0', + WARNING: 'severity-warn-0', + Warn: 'severity-warn-1', + warn: 'severity-warn-2', + warning: 'severity-warn-3', + Warning: 'severity-warn-4', + wrn: 'severity-warn-3', + Wrn: 'severity-warn-4', + + // Error variants - cherry-600 to cherry-200 + // eslint-disable-next-line sonarjs/no-duplicate-string + ERROR: 'severity-error-0', + Error: 'severity-error-1', + error: 'severity-error-2', + err: 'severity-error-3', + Err: 'severity-error-4', + ERR: 'severity-error-0', + fail: 'severity-error-2', + Fail: 'severity-error-3', + FAIL: 'severity-error-0', + + // Fatal variants - sakura-600 to sakura-200 + // eslint-disable-next-line sonarjs/no-duplicate-string + FATAL: 'severity-fatal-0', + Fatal: 'severity-fatal-1', + fatal: 'severity-fatal-2', + // eslint-disable-next-line sonarjs/no-duplicate-string + critical: 'severity-fatal-3', + Critical: 'severity-fatal-4', + CRITICAL: 'severity-fatal-0', + crit: 'severity-fatal-3', + Crit: 'severity-fatal-4', + CRIT: 'severity-fatal-0', + panic: 'severity-fatal-2', + Panic: 'severity-fatal-3', + PANIC: 'severity-fatal-0', +}; + +function getSeverityClass( + severityText?: string, + severityNumber?: number, +): string { + // Priority 1: Use severityText for exact variant mapping + if (severityText) { + const variantClass = SEVERITY_VARIANT_CLASSES[severityText.trim()]; + if (variantClass) { + return variantClass; + } + } + + // Priority 2: Use severityNumber for base color (use middle shade as default) + if (severityNumber) { + const logType = getLogTypeBySeverityNumber(severityNumber); + if (logType !== LogType.UNKNOWN) { + return 
`severity-${logType.toLowerCase()}-0`; // Use base shade (index 0) for this severity type
+		}
+	}
+
+	return 'severity-info-0'; // Final fallback: base info shade when nothing matches
+}
+
 function LogStateIndicator({
-	type,
 	fontSize,
+	severityText,
+	severityNumber,
 }: {
-	type: string;
 	fontSize: FontSize;
+	severityText?: string;
+	severityNumber?: number;
 }): JSX.Element {
+	const severityClass = getSeverityClass(severityText, severityNumber);
+
 	return (
 		<div
-
+
); } +LogStateIndicator.defaultProps = { + severityText: '', + severityNumber: 0, +}; + export default LogStateIndicator; diff --git a/frontend/src/components/Logs/LogStateIndicator/utils.ts b/frontend/src/components/Logs/LogStateIndicator/utils.ts index 03989a8dd602..963f319aceb0 100644 --- a/frontend/src/components/Logs/LogStateIndicator/utils.ts +++ b/frontend/src/components/Logs/LogStateIndicator/utils.ts @@ -41,7 +41,7 @@ const getLogTypeBySeverityText = (severityText: string): string => { }; // https://opentelemetry.io/docs/specs/otel/logs/data-model/#field-severitynumber -const getLogTypeBySeverityNumber = (severityNumber: number): string => { +export const getLogTypeBySeverityNumber = (severityNumber: number): string => { if (severityNumber < 1) { return LogType.UNKNOWN; } diff --git a/frontend/src/components/Logs/RawLogView/index.tsx b/frontend/src/components/Logs/RawLogView/index.tsx index 4ad7329f8353..c1f1cc346c69 100644 --- a/frontend/src/components/Logs/RawLogView/index.tsx +++ b/frontend/src/components/Logs/RawLogView/index.tsx @@ -192,7 +192,11 @@ function RawLogView({ onMouseLeave={handleMouseLeave} fontSize={fontSize} > - + { children: (
), diff --git a/frontend/src/container/LogsExplorerChart/utils.ts b/frontend/src/container/LogsExplorerChart/utils.ts index 40253750da26..dc51d005d2f1 100644 --- a/frontend/src/container/LogsExplorerChart/utils.ts +++ b/frontend/src/container/LogsExplorerChart/utils.ts @@ -2,12 +2,80 @@ import { Color } from '@signozhq/design-tokens'; import { themeColors } from 'constants/theme'; import { colors } from 'lib/getRandomColor'; +const SEVERITY_VARIANT_COLORS: Record = { + TRACE: Color.BG_FOREST_600, + Trace: Color.BG_FOREST_500, + trace: Color.BG_FOREST_400, + trc: Color.BG_FOREST_300, + Trc: Color.BG_FOREST_200, + + DEBUG: Color.BG_AQUA_600, + Debug: Color.BG_AQUA_500, + debug: Color.BG_AQUA_400, + dbg: Color.BG_AQUA_300, + Dbg: Color.BG_AQUA_200, + + INFO: Color.BG_ROBIN_600, + Info: Color.BG_ROBIN_500, + info: Color.BG_ROBIN_400, + Information: Color.BG_ROBIN_300, + information: Color.BG_ROBIN_200, + + WARN: Color.BG_AMBER_600, + Warn: Color.BG_AMBER_500, + warn: Color.BG_AMBER_400, + warning: Color.BG_AMBER_300, + Warning: Color.BG_AMBER_200, + wrn: Color.BG_AMBER_300, + Wrn: Color.BG_AMBER_200, + + ERROR: Color.BG_CHERRY_600, + Error: Color.BG_CHERRY_500, + error: Color.BG_CHERRY_400, + err: Color.BG_CHERRY_300, + Err: Color.BG_CHERRY_200, + ERR: Color.BG_CHERRY_600, + fail: Color.BG_CHERRY_400, + Fail: Color.BG_CHERRY_300, + FAIL: Color.BG_CHERRY_600, + + FATAL: Color.BG_SAKURA_600, + Fatal: Color.BG_SAKURA_500, + fatal: Color.BG_SAKURA_400, + critical: Color.BG_SAKURA_300, + Critical: Color.BG_SAKURA_200, + CRITICAL: Color.BG_SAKURA_600, + crit: Color.BG_SAKURA_300, + Crit: Color.BG_SAKURA_200, + CRIT: Color.BG_SAKURA_600, + panic: Color.BG_SAKURA_400, + Panic: Color.BG_SAKURA_300, + PANIC: Color.BG_SAKURA_600, +}; + +// Simple function to get severity color for any component +export function getSeverityColor(severityText: string): string { + const variantColor = SEVERITY_VARIANT_COLORS[severityText.trim()]; + if (variantColor) { + return variantColor; + } + + 
return Color.BG_ROBIN_500; // Default fallback +} + export function getColorsForSeverityLabels( label: string, index: number, ): string { + // Check if we have a direct mapping for this severity variant + const variantColor = SEVERITY_VARIANT_COLORS[label.trim()]; + if (variantColor) { + return variantColor; + } + const lowerCaseLabel = label.toLowerCase(); + // Fallback to old format for backward compatibility if (lowerCaseLabel.includes(`{severity_text="trace"}`)) { return Color.BG_FOREST_400; } diff --git a/frontend/src/container/LogsExplorerList/ColumnView/ColumnView.tsx b/frontend/src/container/LogsExplorerList/ColumnView/ColumnView.tsx index 052095277e26..9e65d648fa4c 100644 --- a/frontend/src/container/LogsExplorerList/ColumnView/ColumnView.tsx +++ b/frontend/src/container/LogsExplorerList/ColumnView/ColumnView.tsx @@ -4,7 +4,6 @@ import { ColumnDef, DataTable, Row } from '@signozhq/table'; import LogDetail from 'components/LogDetail'; import { VIEW_TYPES } from 'components/LogDetail/constants'; import LogStateIndicator from 'components/Logs/LogStateIndicator/LogStateIndicator'; -import { getLogIndicatorTypeForTable } from 'components/Logs/LogStateIndicator/utils'; import { useTableView } from 'components/Logs/TableView/useTableView'; import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats'; import { LOCALSTORAGE } from 'constants/localStorage'; @@ -169,10 +168,14 @@ function ColumnView({ getValue: () => string | JSX.Element; }): string | JSX.Element => { if (field.key === 'state-indicator') { - const type = getLogIndicatorTypeForTable(row.original); const fontSize = options.fontSize as FontSize; - return ; + return ( + + ); } const isTimestamp = field.key === 'timestamp'; From f23000831c3743078acc800928b33a62c2824a59 Mon Sep 17 00:00:00 2001 From: SagarRajput-7 <162284829+SagarRajput-7@users.noreply.github.com> Date: Wed, 10 Sep 2025 11:22:57 +0530 Subject: [PATCH 10/51] feat: update yarn lock file (#9055) --- frontend/yarn.lock | 2 +- 1 file changed, 
1 insertion(+), 1 deletion(-) diff --git a/frontend/yarn.lock b/frontend/yarn.lock index 09a6c3e65881..0ce1ca1a5b21 100644 --- a/frontend/yarn.lock +++ b/frontend/yarn.lock @@ -6899,7 +6899,7 @@ boolbase@^1.0.0: resolved "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz" integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== -brace-expansion@2.0.2, brace-expansion@^1.1.7, brace-expansion@^2.0.1: +brace-expansion@^1.1.7, brace-expansion@^2.0.1, brace-expansion@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.2.tgz#54fc53237a613d854c7bd37463aad17df87214e7" integrity sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ== From 31e042adf70f309669ef500567377eb9b9015d28 Mon Sep 17 00:00:00 2001 From: Vibhu Pandey Date: Wed, 10 Sep 2025 13:58:13 +0530 Subject: [PATCH 11/51] feat(alertmanager): deprecate legacy alertmanager (#9046) - Deprecate legacy alertmanager. Are the new alert improvements compatible with legacy? I don't think they are. More importantly, I don't think they should be. It will be a pain to manage it at both places. - Improve msteamsv2 experience. I have taken alertmanager's upstream and merged it with our custom implementation. Note the use of `titleLink` field to propagate the url of the alert. - Delete the private http server needed for alertmanager. It's cleanup as part of 1. 
--- conf/example.yaml | 5 +- ee/query-service/app/server.go | 95 ---- .../alertmanagernotifytest/test.go | 179 +++++++ .../alertmanagernotify/msteamsv2/msteamsv2.go | 265 ++++++++++ .../msteamsv2/msteamsv2_test.go | 220 ++++++++ .../alertmanagernotify/receiver.go | 51 ++ pkg/alertmanager/alertmanagerserver/server.go | 7 +- pkg/alertmanager/config.go | 18 +- pkg/alertmanager/config_test.go | 13 +- .../legacyalertmanager/provider.go | 482 ------------------ .../legacyalertmanager/provider_test.go | 35 -- pkg/query-service/app/http_handler.go | 10 +- pkg/query-service/app/server.go | 83 --- pkg/signoz/config.go | 14 - pkg/signoz/provider.go | 2 - pkg/types/alertmanagertypes/channel.go | 23 - pkg/types/alertmanagertypes/receiver.go | 36 +- pkg/types/alertmanagertypes/template.go | 1 + 18 files changed, 737 insertions(+), 802 deletions(-) create mode 100644 pkg/alertmanager/alertmanagernotify/alertmanagernotifytest/test.go create mode 100644 pkg/alertmanager/alertmanagernotify/msteamsv2/msteamsv2.go create mode 100644 pkg/alertmanager/alertmanagernotify/msteamsv2/msteamsv2_test.go create mode 100644 pkg/alertmanager/alertmanagernotify/receiver.go delete mode 100644 pkg/alertmanager/legacyalertmanager/provider.go delete mode 100644 pkg/alertmanager/legacyalertmanager/provider_test.go diff --git a/conf/example.yaml b/conf/example.yaml index 7fd1fb9e976c..d22fa37cab0e 100644 --- a/conf/example.yaml +++ b/conf/example.yaml @@ -137,10 +137,7 @@ prometheus: ##################### Alertmanager ##################### alertmanager: # Specifies the alertmanager provider to use. - provider: legacy - legacy: - # The API URL (with prefix) of the legacy Alertmanager instance. - api_url: http://localhost:9093/api + provider: signoz signoz: # The poll interval for periodically syncing the alertmanager with the config in the store. 
poll_interval: 1m diff --git a/ee/query-service/app/server.go b/ee/query-service/app/server.go index fc516f49e95a..b83dd4f51b10 100644 --- a/ee/query-service/app/server.go +++ b/ee/query-service/app/server.go @@ -44,19 +44,6 @@ import ( "go.uber.org/zap" ) -type ServerOptions struct { - Config signoz.Config - SigNoz *signoz.SigNoz - HTTPHostPort string - PrivateHostPort string - PreferSpanMetrics bool - FluxInterval string - FluxIntervalForTraceDetail string - Cluster string - GatewayUrl string - Jwt *authtypes.JWT -} - // Server runs HTTP, Mux and a grpc server type Server struct { config signoz.Config @@ -69,11 +56,6 @@ type Server struct { httpServer *http.Server httpHostPort string - // private http - privateConn net.Listener - privateHTTP *http.Server - privateHostPort string - opampServer *opamp.Server // Usage manager @@ -183,7 +165,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) jwt: jwt, ruleManager: rm, httpHostPort: baseconst.HTTPHostPort, - privateHostPort: baseconst.PrivateHostPort, unavailableChannel: make(chan healthcheck.Status), usageManager: usageManager, } @@ -196,13 +177,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) s.httpServer = httpServer - privateServer, err := s.createPrivateServer(apiHandler) - if err != nil { - return nil, err - } - - s.privateHTTP = privateServer - s.opampServer = opamp.InitializeServer( &opAmpModel.AllAgents, agentConfMgr, signoz.Instrumentation, ) @@ -215,36 +189,6 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status { return s.unavailableChannel } -func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) { - r := baseapp.NewRouter() - - r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap) - r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), 
s.signoz.Sharder).Wrap) - r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(), - s.config.APIServer.Timeout.ExcludedRoutes, - s.config.APIServer.Timeout.Default, - s.config.APIServer.Timeout.Max, - ).Wrap) - r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap) - - apiHandler.RegisterPrivateRoutes(r) - - c := cors.New(cors.Options{ - //todo(amol): find out a way to add exact domain or - // ip here for alert manager - AllowedOrigins: []string{"*"}, - AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"}, - AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "SIGNOZ-API-KEY", "X-SIGNOZ-QUERY-ID", "Sec-WebSocket-Protocol"}, - }) - - handler := c.Handler(r) - handler = handlers.CompressHandler(handler) - - return &http.Server{ - Handler: handler, - }, nil -} - func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) { r := baseapp.NewRouter() am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger()) @@ -310,19 +254,6 @@ func (s *Server) initListeners() error { zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort)) - // listen on private port to support internal services - privateHostPort := s.privateHostPort - - if privateHostPort == "" { - return fmt.Errorf("baseconst.PrivateHostPort is required") - } - - s.privateConn, err = net.Listen("tcp", privateHostPort) - if err != nil { - return err - } - zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.privateHostPort)) - return nil } @@ -361,26 +292,6 @@ func (s *Server) Start(ctx context.Context) error { } }() - var privatePort int - if port, err := utils.GetPort(s.privateConn.Addr()); err == nil { - privatePort = port - } - - go func() { - zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.privateHostPort)) - - switch err := s.privateHTTP.Serve(s.privateConn); err { - case nil, 
http.ErrServerClosed, cmux.ErrListenerClosed: - // normal exit, nothing to do - zap.L().Info("private http server closed") - default: - zap.L().Error("Could not start private HTTP server", zap.Error(err)) - } - - s.unavailableChannel <- healthcheck.Unavailable - - }() - go func() { zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint)) err := s.opampServer.Start(baseconst.OpAmpWsEndpoint) @@ -400,12 +311,6 @@ func (s *Server) Stop(ctx context.Context) error { } } - if s.privateHTTP != nil { - if err := s.privateHTTP.Shutdown(ctx); err != nil { - return err - } - } - s.opampServer.Stop() if s.ruleManager != nil { diff --git a/pkg/alertmanager/alertmanagernotify/alertmanagernotifytest/test.go b/pkg/alertmanager/alertmanagernotify/alertmanagernotifytest/test.go new file mode 100644 index 000000000000..3b983c4fee7e --- /dev/null +++ b/pkg/alertmanager/alertmanagernotify/alertmanagernotifytest/test.go @@ -0,0 +1,179 @@ +package test + +import ( + "context" + "net/http" + "net/http/httptest" + "net/url" + "testing" + "time" + + "github.com/SigNoz/signoz/pkg/types/alertmanagertypes" + "github.com/prometheus/common/model" + "github.com/stretchr/testify/require" + + "github.com/prometheus/alertmanager/notify" + "github.com/prometheus/alertmanager/template" + "github.com/prometheus/alertmanager/types" +) + +// RetryTests returns a map of HTTP status codes to bool indicating whether the notifier should retry or not. 
+func RetryTests(retryCodes []int) map[int]bool { + tests := map[int]bool{ + // 1xx + http.StatusContinue: false, + http.StatusSwitchingProtocols: false, + http.StatusProcessing: false, + + // 2xx + http.StatusOK: false, + http.StatusCreated: false, + http.StatusAccepted: false, + http.StatusNonAuthoritativeInfo: false, + http.StatusNoContent: false, + http.StatusResetContent: false, + http.StatusPartialContent: false, + http.StatusMultiStatus: false, + http.StatusAlreadyReported: false, + http.StatusIMUsed: false, + + // 3xx + http.StatusMultipleChoices: false, + http.StatusMovedPermanently: false, + http.StatusFound: false, + http.StatusSeeOther: false, + http.StatusNotModified: false, + http.StatusUseProxy: false, + http.StatusTemporaryRedirect: false, + http.StatusPermanentRedirect: false, + + // 4xx + http.StatusBadRequest: false, + http.StatusUnauthorized: false, + http.StatusPaymentRequired: false, + http.StatusForbidden: false, + http.StatusNotFound: false, + http.StatusMethodNotAllowed: false, + http.StatusNotAcceptable: false, + http.StatusProxyAuthRequired: false, + http.StatusRequestTimeout: false, + http.StatusConflict: false, + http.StatusGone: false, + http.StatusLengthRequired: false, + http.StatusPreconditionFailed: false, + http.StatusRequestEntityTooLarge: false, + http.StatusRequestURITooLong: false, + http.StatusUnsupportedMediaType: false, + http.StatusRequestedRangeNotSatisfiable: false, + http.StatusExpectationFailed: false, + http.StatusTeapot: false, + http.StatusUnprocessableEntity: false, + http.StatusLocked: false, + http.StatusFailedDependency: false, + http.StatusUpgradeRequired: false, + http.StatusPreconditionRequired: false, + http.StatusTooManyRequests: false, + http.StatusRequestHeaderFieldsTooLarge: false, + http.StatusUnavailableForLegalReasons: false, + + // 5xx + http.StatusInternalServerError: false, + http.StatusNotImplemented: false, + http.StatusBadGateway: false, + http.StatusServiceUnavailable: false, + 
http.StatusGatewayTimeout: false, + http.StatusHTTPVersionNotSupported: false, + http.StatusVariantAlsoNegotiates: false, + http.StatusInsufficientStorage: false, + http.StatusLoopDetected: false, + http.StatusNotExtended: false, + http.StatusNetworkAuthenticationRequired: false, + } + + for _, statusCode := range retryCodes { + tests[statusCode] = true + } + + return tests +} + +// DefaultRetryCodes returns the list of HTTP status codes that need to be retried. +func DefaultRetryCodes() []int { + return []int{ + http.StatusInternalServerError, + http.StatusNotImplemented, + http.StatusBadGateway, + http.StatusServiceUnavailable, + http.StatusGatewayTimeout, + http.StatusHTTPVersionNotSupported, + http.StatusVariantAlsoNegotiates, + http.StatusInsufficientStorage, + http.StatusLoopDetected, + http.StatusNotExtended, + http.StatusNetworkAuthenticationRequired, + } +} + +// CreateTmpl returns a ready-to-use template. +func CreateTmpl(t *testing.T) *template.Template { + tmpl, err := alertmanagertypes.FromGlobs([]string{}) + require.NoError(t, err) + tmpl.ExternalURL, _ = url.Parse("http://am") + return tmpl +} + +// AssertNotifyLeaksNoSecret calls the Notify() method of the notifier, expects +// it to fail because the context is canceled by the server and checks that no +// secret data is leaked in the error message returned by Notify(). +func AssertNotifyLeaksNoSecret(ctx context.Context, t *testing.T, n notify.Notifier, secret ...string) { + t.Helper() + require.NotEmpty(t, secret) + + ctx = notify.WithGroupKey(ctx, "1") + ok, err := n.Notify(ctx, []*types.Alert{ + { + Alert: model.Alert{ + Labels: model.LabelSet{ + "lbl1": "val1", + }, + StartsAt: time.Now(), + EndsAt: time.Now().Add(time.Hour), + }, + }, + }...) 
+ + require.Error(t, err) + require.Contains(t, err.Error(), context.Canceled.Error()) + for _, s := range secret { + require.NotContains(t, err.Error(), s) + } + require.True(t, ok) +} + +// GetContextWithCancelingURL returns a context that gets canceled when a +// client does a GET request to the returned URL. +// Handlers passed to the function will be invoked in order before the context gets canceled. +// The last argument is a function that needs to be called before the caller returns. +func GetContextWithCancelingURL(h ...func(w http.ResponseWriter, r *http.Request)) (context.Context, *url.URL, func()) { + done := make(chan struct{}) + ctx, cancel := context.WithCancel(context.Background()) + i := 0 + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if i < len(h) { + h[i](w, r) + } else { + cancel() + <-done + } + i++ + })) + + // No need to check the error since httptest.NewServer always return a valid URL. + u, _ := url.Parse(srv.URL) + + return ctx, u, func() { + close(done) + srv.Close() + } +} diff --git a/pkg/alertmanager/alertmanagernotify/msteamsv2/msteamsv2.go b/pkg/alertmanager/alertmanagernotify/msteamsv2/msteamsv2.go new file mode 100644 index 000000000000..d2be7ed19975 --- /dev/null +++ b/pkg/alertmanager/alertmanagernotify/msteamsv2/msteamsv2.go @@ -0,0 +1,265 @@ +package msteamsv2 + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "log/slog" + "net/http" + "os" + "slices" + "strings" + + commoncfg "github.com/prometheus/common/config" + "github.com/prometheus/common/model" + + "github.com/prometheus/alertmanager/config" + "github.com/prometheus/alertmanager/notify" + "github.com/prometheus/alertmanager/template" + "github.com/prometheus/alertmanager/types" +) + +const ( + colorRed = "Attention" + colorGreen = "Good" + colorGrey = "Warning" +) + +type Notifier struct { + conf *config.MSTeamsV2Config + titleLink string + tmpl *template.Template + logger *slog.Logger + client 
*http.Client + retrier *notify.Retrier + webhookURL *config.SecretURL + postJSONFunc func(ctx context.Context, client *http.Client, url string, body io.Reader) (*http.Response, error) +} + +// https://learn.microsoft.com/en-us/connectors/teams/?tabs=text1#adaptivecarditemschema +type Content struct { + Schema string `json:"$schema"` + Type string `json:"type"` + Version string `json:"version"` + Body []Body `json:"body"` + Msteams Msteams `json:"msteams,omitempty"` + Actions []Action `json:"actions"` +} + +type Body struct { + Type string `json:"type"` + Text string `json:"text"` + Weight string `json:"weight,omitempty"` + Size string `json:"size,omitempty"` + Wrap bool `json:"wrap,omitempty"` + Style string `json:"style,omitempty"` + Color string `json:"color,omitempty"` + Facts []Fact `json:"facts,omitempty"` +} + +type Action struct { + Type string `json:"type"` + Title string `json:"title"` + URL string `json:"url"` +} + +type Fact struct { + Title string `json:"title"` + Value string `json:"value"` +} + +type Msteams struct { + Width string `json:"width"` +} + +type Attachment struct { + ContentType string `json:"contentType"` + ContentURL *string `json:"contentUrl"` // Use a pointer to handle null values + Content Content `json:"content"` +} + +type teamsMessage struct { + Type string `json:"type"` + Attachments []Attachment `json:"attachments"` +} + +// New returns a new notifier that uses the Microsoft Teams Power Platform connector. +func New(c *config.MSTeamsV2Config, t *template.Template, titleLink string, l *slog.Logger, httpOpts ...commoncfg.HTTPClientOption) (*Notifier, error) { + client, err := commoncfg.NewClientFromConfig(*c.HTTPConfig, "msteamsv2", httpOpts...) 
+ if err != nil { + return nil, err + } + + n := &Notifier{ + conf: c, + titleLink: titleLink, + tmpl: t, + logger: l, + client: client, + retrier: ¬ify.Retrier{}, + webhookURL: c.WebhookURL, + postJSONFunc: notify.PostJSON, + } + + return n, nil +} + +func (n *Notifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) { + key, err := notify.ExtractGroupKey(ctx) + if err != nil { + return false, err + } + + n.logger.DebugContext(ctx, "extracted group key", "key", key) + + data := notify.GetTemplateData(ctx, n.tmpl, as, n.logger) + tmpl := notify.TmplText(n.tmpl, data, &err) + if err != nil { + return false, err + } + + title := tmpl(n.conf.Title) + if err != nil { + return false, err + } + + titleLink := tmpl(n.titleLink) + if err != nil { + return false, err + } + + alerts := types.Alerts(as...) + color := colorGrey + switch alerts.Status() { + case model.AlertFiring: + color = colorRed + case model.AlertResolved: + color = colorGreen + } + + var url string + if n.conf.WebhookURL != nil { + url = n.conf.WebhookURL.String() + } else { + content, err := os.ReadFile(n.conf.WebhookURLFile) + if err != nil { + return false, fmt.Errorf("read webhook_url_file: %w", err) + } + url = strings.TrimSpace(string(content)) + } + + // A message as referenced in https://learn.microsoft.com/en-us/connectors/teams/?tabs=text1%2Cdotnet#request-body-schema + t := teamsMessage{ + Type: "message", + Attachments: []Attachment{ + { + ContentType: "application/vnd.microsoft.card.adaptive", + ContentURL: nil, + Content: Content{ + Schema: "http://adaptivecards.io/schemas/adaptive-card.json", + Type: "AdaptiveCard", + Version: "1.2", + Body: []Body{ + { + Type: "TextBlock", + Text: title, + Weight: "Bolder", + Size: "Medium", + Wrap: true, + Style: "heading", + Color: color, + }, + }, + Actions: []Action{ + { + Type: "Action.OpenUrl", + Title: "View Alert", + URL: titleLink, + }, + }, + Msteams: Msteams{ + Width: "full", + }, + }, + }, + }, + } + + // add labels and annotations 
to the body of all alerts + for _, alert := range as { + t.Attachments[0].Content.Body = append(t.Attachments[0].Content.Body, Body{ + Type: "TextBlock", + Text: "Alerts", + Weight: "Bolder", + Size: "Medium", + Wrap: true, + Color: color, + }) + + t.Attachments[0].Content.Body = append(t.Attachments[0].Content.Body, n.createLabelsAndAnnotationsBody(alert)...) + } + + var payload bytes.Buffer + if err = json.NewEncoder(&payload).Encode(t); err != nil { + return false, err + } + + resp, err := n.postJSONFunc(ctx, n.client, url, &payload) //nolint:bodyclose + if err != nil { + return true, notify.RedactURL(err) + } + defer notify.Drain(resp) //drain is used to close the body of the response hence the nolint directive + + // https://learn.microsoft.com/en-us/microsoftteams/platform/webhooks-and-connectors/how-to/connectors-using?tabs=cURL#rate-limiting-for-connectors + shouldRetry, err := n.retrier.Check(resp.StatusCode, resp.Body) + if err != nil { + return shouldRetry, notify.NewErrorWithReason(notify.GetFailureReasonFromStatusCode(resp.StatusCode), err) + } + return shouldRetry, err +} + +func (*Notifier) createLabelsAndAnnotationsBody(alert *types.Alert) []Body { + bodies := []Body{} + bodies = append(bodies, Body{ + Type: "TextBlock", + Text: "Labels", + Weight: "Bolder", + Size: "Medium", + }) + + facts := []Fact{} + for k, v := range alert.Labels { + if slices.Contains([]string{"alertname", "severity", "ruleId", "ruleSource"}, string(k)) { + continue + } + facts = append(facts, Fact{Title: string(k), Value: string(v)}) + } + bodies = append(bodies, Body{ + Type: "FactSet", + Facts: facts, + }) + + bodies = append(bodies, Body{ + Type: "TextBlock", + Text: "Annotations", + Weight: "Bolder", + Size: "Medium", + }) + + annotationsFacts := []Fact{} + for k, v := range alert.Annotations { + if slices.Contains([]string{"summary", "related_logs", "related_traces"}, string(k)) { + continue + } + annotationsFacts = append(annotationsFacts, Fact{Title: string(k), Value: 
string(v)}) + } + + bodies = append(bodies, Body{ + Type: "FactSet", + Facts: annotationsFacts, + }) + + return bodies +} diff --git a/pkg/alertmanager/alertmanagernotify/msteamsv2/msteamsv2_test.go b/pkg/alertmanager/alertmanagernotify/msteamsv2/msteamsv2_test.go new file mode 100644 index 000000000000..2a0d884bb1e1 --- /dev/null +++ b/pkg/alertmanager/alertmanagernotify/msteamsv2/msteamsv2_test.go @@ -0,0 +1,220 @@ +package msteamsv2 + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "net/url" + "os" + "testing" + "time" + + commoncfg "github.com/prometheus/common/config" + "github.com/prometheus/common/model" + "github.com/prometheus/common/promslog" + "github.com/stretchr/testify/require" + + test "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagernotify/alertmanagernotifytest" + "github.com/prometheus/alertmanager/config" + "github.com/prometheus/alertmanager/notify" + "github.com/prometheus/alertmanager/types" +) + +// This is a test URL that has been modified to not be valid. +var testWebhookURL, _ = url.Parse("https://example.westeurope.logic.azure.com:443/workflows/xxx/triggers/manual/paths/invoke?api-version=2016-06-01&sp=%2Ftriggers%2Fmanual%2Frun&sv=1.0&sig=xxx") + +func TestMSTeamsV2Retry(t *testing.T) { + notifier, err := New( + &config.MSTeamsV2Config{ + WebhookURL: &config.SecretURL{URL: testWebhookURL}, + HTTPConfig: &commoncfg.HTTPClientConfig{}, + }, + test.CreateTmpl(t), + `{{ template "msteamsv2.default.titleLink" . 
}}`, + promslog.NewNopLogger(), + ) + require.NoError(t, err) + + for statusCode, expected := range test.RetryTests(test.DefaultRetryCodes()) { + actual, _ := notifier.retrier.Check(statusCode, nil) + require.Equal(t, expected, actual, "retry - error on status %d", statusCode) + } +} + +func TestNotifier_Notify_WithReason(t *testing.T) { + tests := []struct { + name string + statusCode int + responseContent string + expectedReason notify.Reason + noError bool + }{ + { + name: "with a 2xx status code and response 1", + statusCode: http.StatusOK, + responseContent: "1", + noError: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + notifier, err := New( + &config.MSTeamsV2Config{ + WebhookURL: &config.SecretURL{URL: testWebhookURL}, + HTTPConfig: &commoncfg.HTTPClientConfig{}, + }, + test.CreateTmpl(t), + `{{ template "msteamsv2.default.titleLink" . }}`, + promslog.NewNopLogger(), + ) + require.NoError(t, err) + + notifier.postJSONFunc = func(ctx context.Context, client *http.Client, url string, body io.Reader) (*http.Response, error) { + resp := httptest.NewRecorder() + _, err := resp.WriteString(tt.responseContent) + require.NoError(t, err) + resp.WriteHeader(tt.statusCode) + return resp.Result(), nil + } + ctx := context.Background() + ctx = notify.WithGroupKey(ctx, "1") + + alert1 := &types.Alert{ + Alert: model.Alert{ + StartsAt: time.Now(), + EndsAt: time.Now().Add(time.Hour), + }, + } + _, err = notifier.Notify(ctx, alert1) + if tt.noError { + require.NoError(t, err) + } else { + var reasonError *notify.ErrorWithReason + require.ErrorAs(t, err, &reasonError) + require.Equal(t, tt.expectedReason, reasonError.Reason) + } + }) + } +} + +func TestMSTeamsV2Templating(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + dec := json.NewDecoder(r.Body) + out := make(map[string]any) + err := dec.Decode(&out) + if err != nil { + panic(err) + } + })) + defer srv.Close() + u, _ := 
url.Parse(srv.URL) + + for _, tc := range []struct { + title string + cfg *config.MSTeamsV2Config + titleLink string + + retry bool + errMsg string + }{ + { + title: "full-blown message", + cfg: &config.MSTeamsV2Config{ + Title: `{{ template "msteams.default.title" . }}`, + Text: `{{ template "msteams.default.text" . }}`, + }, + titleLink: `{{ template "msteamsv2.default.titleLink" . }}`, + retry: false, + }, + { + title: "title with templating errors", + cfg: &config.MSTeamsV2Config{ + Title: "{{ ", + }, + titleLink: `{{ template "msteamsv2.default.titleLink" . }}`, + errMsg: "template: :1: unclosed action", + }, + { + title: "message with title link templating errors", + cfg: &config.MSTeamsV2Config{ + Title: `{{ template "msteams.default.title" . }}`, + Text: `{{ template "msteams.default.text" . }}`, + }, + titleLink: `{{ `, + errMsg: "template: :1: unclosed action", + }, + } { + t.Run(tc.title, func(t *testing.T) { + tc.cfg.WebhookURL = &config.SecretURL{URL: u} + tc.cfg.HTTPConfig = &commoncfg.HTTPClientConfig{} + pd, err := New(tc.cfg, test.CreateTmpl(t), tc.titleLink, promslog.NewNopLogger()) + require.NoError(t, err) + + ctx := context.Background() + ctx = notify.WithGroupKey(ctx, "1") + + ok, err := pd.Notify(ctx, []*types.Alert{ + { + Alert: model.Alert{ + Labels: model.LabelSet{ + "lbl1": "val1", + }, + StartsAt: time.Now(), + EndsAt: time.Now().Add(time.Hour), + }, + }, + }...) + if tc.errMsg == "" { + require.NoError(t, err) + } else { + require.Error(t, err) + require.Contains(t, err.Error(), tc.errMsg) + } + require.Equal(t, tc.retry, ok) + }) + } +} + +func TestMSTeamsV2RedactedURL(t *testing.T) { + ctx, u, fn := test.GetContextWithCancelingURL() + defer fn() + + secret := "secret" + notifier, err := New( + &config.MSTeamsV2Config{ + WebhookURL: &config.SecretURL{URL: u}, + HTTPConfig: &commoncfg.HTTPClientConfig{}, + }, + test.CreateTmpl(t), + `{{ template "msteamsv2.default.titleLink" . 
}}`, + promslog.NewNopLogger(), + ) + require.NoError(t, err) + + test.AssertNotifyLeaksNoSecret(ctx, t, notifier, secret) +} + +func TestMSTeamsV2ReadingURLFromFile(t *testing.T) { + ctx, u, fn := test.GetContextWithCancelingURL() + defer fn() + + f, err := os.CreateTemp("", "webhook_url") + require.NoError(t, err, "creating temp file failed") + _, err = f.WriteString(u.String() + "\n") + require.NoError(t, err, "writing to temp file failed") + + notifier, err := New( + &config.MSTeamsV2Config{ + WebhookURLFile: f.Name(), + HTTPConfig: &commoncfg.HTTPClientConfig{}, + }, + test.CreateTmpl(t), + `{{ template "msteamsv2.default.titleLink" . }}`, + promslog.NewNopLogger(), + ) + require.NoError(t, err) + + test.AssertNotifyLeaksNoSecret(ctx, t, notifier, u.String()) +} diff --git a/pkg/alertmanager/alertmanagernotify/receiver.go b/pkg/alertmanager/alertmanagernotify/receiver.go new file mode 100644 index 000000000000..33fd63f82454 --- /dev/null +++ b/pkg/alertmanager/alertmanagernotify/receiver.go @@ -0,0 +1,51 @@ +package alertmanagernotify + +import ( + "log/slog" + + "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagernotify/msteamsv2" + "github.com/SigNoz/signoz/pkg/types/alertmanagertypes" + "github.com/prometheus/alertmanager/config/receiver" + "github.com/prometheus/alertmanager/notify" + "github.com/prometheus/alertmanager/template" + "github.com/prometheus/alertmanager/types" +) + +func NewReceiverIntegrations(nc alertmanagertypes.Receiver, tmpl *template.Template, logger *slog.Logger) ([]notify.Integration, error) { + upstreamIntegrations, err := receiver.BuildReceiverIntegrations(nc, tmpl, logger) + if err != nil { + return nil, err + } + + var ( + errs types.MultiError + integrations []notify.Integration + add = func(name string, i int, rs notify.ResolvedSender, f func(l *slog.Logger) (notify.Notifier, error)) { + n, err := f(logger.With("integration", name)) + if err != nil { + errs.Add(err) + return + } + integrations = append(integrations, 
notify.NewIntegration(n, rs, name, i, nc.Name)) + } + ) + + for _, integration := range upstreamIntegrations { + // skip upstream msteamsv2 integration + if integration.Name() != "msteamsv2" { + integrations = append(integrations, integration) + } + } + + for i, c := range nc.MSTeamsV2Configs { + add("msteamsv2", i, c, func(l *slog.Logger) (notify.Notifier, error) { + return msteamsv2.New(c, tmpl, `{{ template "msteamsv2.default.titleLink" . }}`, l) + }) + } + + if errs.Len() > 0 { + return nil, &errs + } + + return integrations, nil +} diff --git a/pkg/alertmanager/alertmanagerserver/server.go b/pkg/alertmanager/alertmanagerserver/server.go index 55662340f528..86ea94570be0 100644 --- a/pkg/alertmanager/alertmanagerserver/server.go +++ b/pkg/alertmanager/alertmanagerserver/server.go @@ -7,6 +7,7 @@ import ( "sync" "time" + "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagernotify" "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/types/alertmanagertypes" "github.com/prometheus/alertmanager/dispatch" @@ -243,7 +244,7 @@ func (server *Server) SetConfig(ctx context.Context, alertmanagerConfig *alertma server.logger.InfoContext(ctx, "skipping creation of receiver not referenced by any route", "receiver", rcv.Name) continue } - integrations, err := alertmanagertypes.NewReceiverIntegrations(rcv, server.tmpl, server.logger) + integrations, err := alertmanagernotify.NewReceiverIntegrations(rcv, server.tmpl, server.logger) if err != nil { return err } @@ -316,7 +317,7 @@ func (server *Server) SetConfig(ctx context.Context, alertmanagerConfig *alertma } func (server *Server) TestReceiver(ctx context.Context, receiver alertmanagertypes.Receiver) error { - return alertmanagertypes.TestReceiver(ctx, receiver, server.alertmanagerConfig, server.tmpl, server.logger, alertmanagertypes.NewTestAlert(receiver, time.Now(), time.Now())) + return alertmanagertypes.TestReceiver(ctx, receiver, alertmanagernotify.NewReceiverIntegrations, server.alertmanagerConfig, 
server.tmpl, server.logger, alertmanagertypes.NewTestAlert(receiver, time.Now(), time.Now())) } func (server *Server) TestAlert(ctx context.Context, postableAlert *alertmanagertypes.PostableAlert, receivers []string) error { @@ -337,7 +338,7 @@ func (server *Server) TestAlert(ctx context.Context, postableAlert *alertmanager ch <- err return } - ch <- alertmanagertypes.TestReceiver(ctx, receiver, server.alertmanagerConfig, server.tmpl, server.logger, alerts[0]) + ch <- alertmanagertypes.TestReceiver(ctx, receiver, alertmanagernotify.NewReceiverIntegrations, server.alertmanagerConfig, server.tmpl, server.logger, alerts[0]) }(receiverName) } diff --git a/pkg/alertmanager/config.go b/pkg/alertmanager/config.go index 2b47c7bc8fff..d21bfb185108 100644 --- a/pkg/alertmanager/config.go +++ b/pkg/alertmanager/config.go @@ -2,9 +2,11 @@ package alertmanager import ( "net/url" + "strings" "time" "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerserver" + "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/factory" ) @@ -14,9 +16,6 @@ type Config struct { // Internal is the internal alertmanager configuration. Signoz Signoz `mapstructure:"signoz" yaml:"signoz"` - - // Legacy is the legacy alertmanager configuration. 
- Legacy Legacy `mapstructure:"legacy"` } type Signoz struct { @@ -38,14 +37,7 @@ func NewConfigFactory() factory.ConfigFactory { func newConfig() factory.Config { return Config{ - Provider: "legacy", - Legacy: Legacy{ - ApiURL: &url.URL{ - Scheme: "http", - Host: "alertmanager:9093", - Path: "/api", - }, - }, + Provider: "signoz", Signoz: Signoz{ PollInterval: 1 * time.Minute, Config: alertmanagerserver.NewConfig(), @@ -54,5 +46,9 @@ func newConfig() factory.Config { } func (c Config) Validate() error { + if c.Provider != "signoz" { + return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "provider must be one of [%s], got %s", strings.Join([]string{"signoz"}, ", "), c.Provider) + } + return nil } diff --git a/pkg/alertmanager/config_test.go b/pkg/alertmanager/config_test.go index 30b35aac3df9..4ea9033c6284 100644 --- a/pkg/alertmanager/config_test.go +++ b/pkg/alertmanager/config_test.go @@ -15,7 +15,7 @@ import ( ) func TestNewWithEnvProvider(t *testing.T) { - t.Setenv("SIGNOZ_ALERTMANAGER_PROVIDER", "legacy") + t.Setenv("SIGNOZ_ALERTMANAGER_PROVIDER", "signoz") t.Setenv("SIGNOZ_ALERTMANAGER_LEGACY_API__URL", "http://localhost:9093/api") t.Setenv("SIGNOZ_ALERTMANAGER_SIGNOZ_ROUTE_REPEAT__INTERVAL", "5m") t.Setenv("SIGNOZ_ALERTMANAGER_SIGNOZ_EXTERNAL__URL", "https://example.com/test") @@ -49,15 +49,8 @@ func TestNewWithEnvProvider(t *testing.T) { } expected := &Config{ - Provider: "legacy", - Legacy: Legacy{ - ApiURL: &url.URL{ - Scheme: "http", - Host: "localhost:9093", - Path: "/api", - }, - }, - Signoz: def.Signoz, + Provider: "signoz", + Signoz: def.Signoz, } assert.Equal(t, expected, actual) diff --git a/pkg/alertmanager/legacyalertmanager/provider.go b/pkg/alertmanager/legacyalertmanager/provider.go deleted file mode 100644 index 13137e196c08..000000000000 --- a/pkg/alertmanager/legacyalertmanager/provider.go +++ /dev/null @@ -1,482 +0,0 @@ -package legacyalertmanager - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - 
"net/http" - "net/url" - "time" - - "github.com/SigNoz/signoz/pkg/alertmanager" - "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerbatcher" - "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerstore/sqlalertmanagerstore" - "github.com/SigNoz/signoz/pkg/factory" - "github.com/SigNoz/signoz/pkg/modules/organization" - "github.com/SigNoz/signoz/pkg/sqlstore" - "github.com/SigNoz/signoz/pkg/types/alertmanagertypes" - "github.com/SigNoz/signoz/pkg/valuer" - "github.com/tidwall/gjson" -) - -type postableAlert struct { - *alertmanagertypes.PostableAlert - Receivers []string `json:"receivers"` -} - -func (pa *postableAlert) MarshalJSON() ([]byte, error) { - // Marshal the embedded PostableAlert to get its JSON representation. - alertJSON, err := json.Marshal(pa.PostableAlert) - if err != nil { - return nil, err - } - - // Unmarshal that JSON into a map so we can add extra fields. - var m map[string]interface{} - if err := json.Unmarshal(alertJSON, &m); err != nil { - return nil, err - } - - // Add the Receivers field. 
- m["receivers"] = pa.Receivers - - return json.Marshal(m) -} - -const ( - alertsPath string = "/v1/alerts" - routesPath string = "/v1/routes" - testReceiverPath string = "/v1/testReceiver" -) - -type provider struct { - config alertmanager.Config - settings factory.ScopedProviderSettings - client *http.Client - configStore alertmanagertypes.ConfigStore - batcher *alertmanagerbatcher.Batcher - url *url.URL - orgGetter organization.Getter - orgID string -} - -func NewFactory(sqlstore sqlstore.SQLStore, orgGetter organization.Getter) factory.ProviderFactory[alertmanager.Alertmanager, alertmanager.Config] { - return factory.NewProviderFactory(factory.MustNewName("legacy"), func(ctx context.Context, settings factory.ProviderSettings, config alertmanager.Config) (alertmanager.Alertmanager, error) { - return New(ctx, settings, config, sqlstore, orgGetter) - }) -} - -func New(ctx context.Context, providerSettings factory.ProviderSettings, config alertmanager.Config, sqlstore sqlstore.SQLStore, orgGetter organization.Getter) (*provider, error) { - settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/alertmanager/legacyalertmanager") - configStore := sqlalertmanagerstore.NewConfigStore(sqlstore) - - return &provider{ - config: config, - settings: settings, - client: &http.Client{ - Timeout: 30 * time.Second, - }, - configStore: configStore, - batcher: alertmanagerbatcher.New(settings.Logger(), alertmanagerbatcher.NewConfig()), - url: config.Legacy.ApiURL, - orgGetter: orgGetter, - }, nil -} - -func (provider *provider) Start(ctx context.Context) error { - err := provider.batcher.Start(ctx) - if err != nil { - return err - } - - for alerts := range provider.batcher.C { - // For the first time, we need to get the orgID from the config store. - // Since this is the legacy alertmanager, we get the first org from the store. 
- if provider.orgID == "" { - orgIDs, err := provider.orgGetter.ListByOwnedKeyRange(ctx) - if err != nil { - provider.settings.Logger().ErrorContext(ctx, "failed to send alerts to alertmanager", "error", err) - continue - } - - if len(orgIDs) == 0 { - provider.settings.Logger().ErrorContext(ctx, "failed to send alerts to alertmanager", "error", "no orgs found") - continue - } - - provider.orgID = orgIDs[0].ID.String() - } - - if err := provider.putAlerts(ctx, provider.orgID, alerts); err != nil { - provider.settings.Logger().ErrorContext(ctx, "failed to send alerts to alertmanager", "error", err) - } - } - - return nil -} - -func (provider *provider) GetAlerts(ctx context.Context, orgID string, params alertmanagertypes.GettableAlertsParams) (alertmanagertypes.DeprecatedGettableAlerts, error) { - url := provider.url.JoinPath(alertsPath) - url.RawQuery = params.RawQuery - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, url.String(), nil) - if err != nil { - return nil, err - } - req.Header.Add("Content-Type", "application/json") - - resp, err := provider.client.Do(req) - if err != nil { - return nil, err - } - - defer resp.Body.Close() //nolint:errcheck - body, err := io.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - if resp.StatusCode/100 != 2 { - return nil, fmt.Errorf("bad response status %v", resp.Status) - } - - var alerts alertmanagertypes.DeprecatedGettableAlerts - if err := json.Unmarshal([]byte(gjson.GetBytes(body, "data").Raw), &alerts); err != nil { - return nil, err - } - - return alerts, nil -} - -func (provider *provider) PutAlerts(ctx context.Context, orgID string, alerts alertmanagertypes.PostableAlerts) error { - provider.batcher.Add(ctx, alerts...) 
- return nil -} - -func (provider *provider) putAlerts(ctx context.Context, orgID string, alerts alertmanagertypes.PostableAlerts) error { - cfg, err := provider.configStore.Get(ctx, orgID) - if err != nil { - return err - } - - var legacyAlerts []postableAlert - for _, alert := range alerts { - ruleID, ok := alert.Alert.Labels[alertmanagertypes.RuleIDMatcherName] - if !ok { - provider.settings.Logger().WarnContext(ctx, "cannot find ruleID for alert, skipping sending alert to alertmanager", "alert", alert) - continue - } - - receivers := cfg.ReceiverNamesFromRuleID(ruleID) - if len(receivers) == 0 { - provider.settings.Logger().WarnContext(ctx, "cannot find receivers for alert, skipping sending alert to alertmanager", "rule_id", ruleID, "alert", alert) - continue - } - - legacyAlerts = append(legacyAlerts, postableAlert{ - PostableAlert: alert, - Receivers: receivers, - }) - } - - url := provider.url.JoinPath(alertsPath) - - body, err := json.Marshal(legacyAlerts) - if err != nil { - return err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, url.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - req.Header.Add("Content-Type", "application/json") - - resp, err := provider.client.Do(req) - if err != nil { - return err - } - - defer resp.Body.Close() //nolint:errcheck - - // Any HTTP status 2xx is OK. 
- if resp.StatusCode/100 != 2 { - return fmt.Errorf("bad response status %v", resp.Status) - } - - return nil -} - -func (provider *provider) TestReceiver(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver) error { - url := provider.url.JoinPath(testReceiverPath) - - body, err := json.Marshal(alertmanagertypes.MSTeamsV2ReceiverToMSTeamsReceiver(receiver)) - if err != nil { - return err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, url.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - req.Header.Add("Content-Type", "application/json") - - resp, err := provider.client.Do(req) - if err != nil { - return err - } - - defer resp.Body.Close() //nolint:errcheck - - // Any HTTP status 2xx is OK. - if resp.StatusCode/100 != 2 { - return fmt.Errorf("bad response status %v", resp.Status) - } - - return nil -} - -func (provider *provider) TestAlert(ctx context.Context, orgID string, alert *alertmanagertypes.PostableAlert, receivers []string) error { - url := provider.url.JoinPath(alertsPath) - - legacyAlerts := make([]postableAlert, 1) - legacyAlerts[0] = postableAlert{ - PostableAlert: alert, - Receivers: receivers, - } - - body, err := json.Marshal(legacyAlerts) - if err != nil { - return err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, url.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - req.Header.Add("Content-Type", "application/json") - - resp, err := provider.client.Do(req) - if err != nil { - return err - } - - defer resp.Body.Close() //nolint:errcheck - - // Any HTTP status 2xx is OK. 
- if resp.StatusCode/100 != 2 { - return fmt.Errorf("bad response status %v", resp.Status) - } - - return nil -} - -func (provider *provider) ListChannels(ctx context.Context, orgID string) ([]*alertmanagertypes.Channel, error) { - return provider.configStore.ListChannels(ctx, orgID) -} - -func (provider *provider) ListAllChannels(ctx context.Context) ([]*alertmanagertypes.Channel, error) { - channels, err := provider.configStore.ListAllChannels(ctx) - if err != nil { - return nil, err - } - - for _, channel := range channels { - if err := channel.MSTeamsV2ToMSTeams(); err != nil { - return nil, err - } - } - - return channels, nil -} - -func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) (*alertmanagertypes.Channel, error) { - return provider.configStore.GetChannelByID(ctx, orgID, channelID) -} - -func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id valuer.UUID) error { - channel, err := provider.configStore.GetChannelByID(ctx, orgID, id) - if err != nil { - return err - } - - err = channel.Update(receiver) - if err != nil { - return err - } - - config, err := provider.configStore.Get(ctx, orgID) - if err != nil { - return err - } - - if err := config.UpdateReceiver(receiver); err != nil { - return err - } - - err = provider.configStore.UpdateChannel(ctx, orgID, channel, alertmanagertypes.WithCb(func(ctx context.Context) error { - url := provider.url.JoinPath(routesPath) - - body, err := json.Marshal(alertmanagertypes.MSTeamsV2ReceiverToMSTeamsReceiver(receiver)) - if err != nil { - return err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPut, url.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - req.Header.Add("Content-Type", "application/json") - - resp, err := provider.client.Do(req) - if err != nil { - return err - } - - defer resp.Body.Close() //nolint:errcheck - - // Any HTTP status 2xx is OK. 
- if resp.StatusCode/100 != 2 { - return fmt.Errorf("bad response status %v", resp.Status) - } - - if err := provider.configStore.Set(ctx, config); err != nil { - return err - } - - return nil - })) - if err != nil { - return err - } - - return nil -} - -func (provider *provider) CreateChannel(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver) error { - channel := alertmanagertypes.NewChannelFromReceiver(receiver, orgID) - - config, err := provider.configStore.Get(ctx, orgID) - if err != nil { - return err - } - - if err := config.CreateReceiver(receiver); err != nil { - return err - } - - return provider.configStore.CreateChannel(ctx, channel, alertmanagertypes.WithCb(func(ctx context.Context) error { - url := provider.url.JoinPath(routesPath) - - body, err := json.Marshal(alertmanagertypes.MSTeamsV2ReceiverToMSTeamsReceiver(receiver)) - if err != nil { - return err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, url.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - req.Header.Add("Content-Type", "application/json") - - resp, err := provider.client.Do(req) - if err != nil { - return err - } - - defer resp.Body.Close() //nolint:errcheck - - // Any HTTP status 2xx is OK. 
- if resp.StatusCode/100 != 2 { - return fmt.Errorf("bad response status %v", resp.Status) - } - - if err := provider.configStore.Set(ctx, config); err != nil { - return err - } - - return nil - })) -} - -func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) error { - channel, err := provider.configStore.GetChannelByID(ctx, orgID, channelID) - if err != nil { - return err - } - - config, err := provider.configStore.Get(ctx, orgID) - if err != nil { - return err - } - - if err := config.DeleteReceiver(channel.Name); err != nil { - return err - } - - return provider.configStore.DeleteChannelByID(ctx, orgID, channelID, alertmanagertypes.WithCb(func(ctx context.Context) error { - url := provider.url.JoinPath(routesPath) - - body, err := json.Marshal(map[string]string{"name": channel.Name}) - if err != nil { - return err - } - - req, err := http.NewRequestWithContext(ctx, http.MethodDelete, url.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - req.Header.Add("Content-Type", "application/json") - - resp, err := provider.client.Do(req) - if err != nil { - return err - } - - defer resp.Body.Close() //nolint:errcheck - - // Any HTTP status 2xx is OK. 
- if resp.StatusCode/100 != 2 { - return fmt.Errorf("bad response status %v", resp.Status) - } - - if err := provider.configStore.Set(ctx, config); err != nil { - return err - } - - return nil - })) -} - -func (provider *provider) SetConfig(ctx context.Context, config *alertmanagertypes.Config) error { - return provider.configStore.Set(ctx, config) -} - -func (provider *provider) Stop(ctx context.Context) error { - provider.batcher.Stop(ctx) - return nil -} - -func (provider *provider) GetConfig(ctx context.Context, orgID string) (*alertmanagertypes.Config, error) { - return provider.configStore.Get(ctx, orgID) -} - -func (provider *provider) SetDefaultConfig(ctx context.Context, orgID string) error { - config, err := alertmanagertypes.NewDefaultConfig(provider.config.Signoz.Config.Global, provider.config.Signoz.Config.Route, orgID) - if err != nil { - return err - } - - return provider.configStore.Set(ctx, config) -} - -func (provider *provider) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) { - channels, err := provider.configStore.ListChannels(ctx, orgID.String()) - if err != nil { - return nil, err - } - - return alertmanagertypes.NewStatsFromChannels(channels), nil -} diff --git a/pkg/alertmanager/legacyalertmanager/provider_test.go b/pkg/alertmanager/legacyalertmanager/provider_test.go deleted file mode 100644 index e6f2138be32e..000000000000 --- a/pkg/alertmanager/legacyalertmanager/provider_test.go +++ /dev/null @@ -1,35 +0,0 @@ -package legacyalertmanager - -import ( - "encoding/json" - "testing" - - "github.com/SigNoz/signoz/pkg/types/alertmanagertypes" - "github.com/prometheus/alertmanager/api/v2/models" - "github.com/stretchr/testify/assert" -) - -func TestProvider_TestAlert(t *testing.T) { - pa := &postableAlert{ - PostableAlert: &alertmanagertypes.PostableAlert{ - Alert: models.Alert{ - Labels: models.LabelSet{ - "alertname": "test", - }, - GeneratorURL: "http://localhost:9090/graph?g0.expr=up&g0.tab=1", - }, - Annotations: 
models.LabelSet{ - "summary": "test", - }, - }, - Receivers: []string{"receiver1", "receiver2"}, - } - - body, err := json.Marshal(pa) - if err != nil { - t.Fatalf("failed to marshal postable alert: %v", err) - } - - assert.Contains(t, string(body), "receiver1") - assert.Contains(t, string(body), "receiver2") -} diff --git a/pkg/query-service/app/http_handler.go b/pkg/query-service/app/http_handler.go index 67e9a9576a80..eba029961078 100644 --- a/pkg/query-service/app/http_handler.go +++ b/pkg/query-service/app/http_handler.go @@ -84,9 +84,8 @@ import ( type status string const ( - statusSuccess status = "success" - statusError status = "error" - defaultFluxInterval = 5 * time.Minute + statusSuccess status = "success" + statusError status = "error" ) // NewRouter creates and configures a Gorilla Router. @@ -480,11 +479,6 @@ func (aH *APIHandler) Respond(w http.ResponseWriter, data interface{}) { writeHttpResponse(w, data) } -// RegisterPrivateRoutes registers routes for this handler on the given router -func (aH *APIHandler) RegisterPrivateRoutes(router *mux.Router) { - router.HandleFunc("/api/v1/channels", aH.AlertmanagerAPI.ListAllChannels).Methods(http.MethodGet) -} - // RegisterRoutes registers routes for this handler on the given router func (aH *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) { router.HandleFunc("/api/v1/query_range", am.ViewAccess(aH.queryRangeMetrics)).Methods(http.MethodGet) diff --git a/pkg/query-service/app/server.go b/pkg/query-service/app/server.go index b33073eb74fc..b81db76d1cce 100644 --- a/pkg/query-service/app/server.go +++ b/pkg/query-service/app/server.go @@ -54,11 +54,6 @@ type Server struct { httpServer *http.Server httpHostPort string - // private http - privateConn net.Listener - privateHTTP *http.Server - privateHostPort string - opampServer *opamp.Server unavailableChannel chan healthcheck.Status @@ -131,7 +126,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) jwt: 
jwt, ruleManager: rm, httpHostPort: constants.HTTPHostPort, - privateHostPort: constants.PrivateHostPort, unavailableChannel: make(chan healthcheck.Status), } @@ -143,13 +137,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) s.httpServer = httpServer - privateServer, err := s.createPrivateServer(apiHandler) - if err != nil { - return nil, err - } - - s.privateHTTP = privateServer - opAmpModel.Init(signoz.SQLStore, signoz.Instrumentation.Logger(), signoz.Modules.OrgGetter) agentConfMgr, err := agentConf.Initiate( @@ -178,37 +165,6 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status { return s.unavailableChannel } -func (s *Server) createPrivateServer(api *APIHandler) (*http.Server, error) { - - r := NewRouter() - - r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap) - r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(), - s.config.APIServer.Timeout.ExcludedRoutes, - s.config.APIServer.Timeout.Default, - s.config.APIServer.Timeout.Max, - ).Wrap) - r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap) - r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap) - - api.RegisterPrivateRoutes(r) - - c := cors.New(cors.Options{ - //todo(amol): find out a way to add exact domain or - // ip here for alert manager - AllowedOrigins: []string{"*"}, - AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"}, - AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "X-SIGNOZ-QUERY-ID", "Sec-WebSocket-Protocol"}, - }) - - handler := c.Handler(r) - handler = handlers.CompressHandler(handler) - - return &http.Server{ - Handler: handler, - }, nil -} - func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server, error) { r := NewRouter() @@ -275,19 +231,6 @@ func (s 
*Server) initListeners() error { zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort)) - // listen on private port to support internal services - privateHostPort := s.privateHostPort - - if privateHostPort == "" { - return fmt.Errorf("constants.PrivateHostPort is required") - } - - s.privateConn, err = net.Listen("tcp", privateHostPort) - if err != nil { - return err - } - zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.privateHostPort)) - return nil } @@ -326,26 +269,6 @@ func (s *Server) Start(ctx context.Context) error { } }() - var privatePort int - if port, err := utils.GetPort(s.privateConn.Addr()); err == nil { - privatePort = port - } - fmt.Println("starting private http") - go func() { - zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.privateHostPort)) - - switch err := s.privateHTTP.Serve(s.privateConn); err { - case nil, http.ErrServerClosed, cmux.ErrListenerClosed: - // normal exit, nothing to do - zap.L().Info("private http server closed") - default: - zap.L().Error("Could not start private HTTP server", zap.Error(err)) - } - - s.unavailableChannel <- healthcheck.Unavailable - - }() - go func() { zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", constants.OpAmpWsEndpoint)) err := s.opampServer.Start(constants.OpAmpWsEndpoint) @@ -365,12 +288,6 @@ func (s *Server) Stop(ctx context.Context) error { } } - if s.privateHTTP != nil { - if err := s.privateHTTP.Shutdown(context.Background()); err != nil { - return err - } - } - s.opampServer.Stop() if s.ruleManager != nil { diff --git a/pkg/signoz/config.go b/pkg/signoz/config.go index 5d568dda8ae9..3e4377254701 100644 --- a/pkg/signoz/config.go +++ b/pkg/signoz/config.go @@ -239,20 +239,6 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge config.TelemetryStore.Connection.DialTimeout = deprecatedFlags.DialTimeout } - if 
os.Getenv("ALERTMANAGER_API_PREFIX") != "" { - logger.WarnContext(ctx, "[Deprecated] env ALERTMANAGER_API_PREFIX is deprecated and scheduled for removal. Please use SIGNOZ_ALERTMANAGER_LEGACY_API__URL instead.") - u, err := url.Parse(os.Getenv("ALERTMANAGER_API_PREFIX")) - if err != nil { - logger.WarnContext(ctx, "Error parsing ALERTMANAGER_API_PREFIX, using default value") - } else { - config.Alertmanager.Legacy.ApiURL = u - } - } - - if os.Getenv("ALERTMANAGER_API_CHANNEL_PATH") != "" { - logger.WarnContext(ctx, "[Deprecated] env ALERTMANAGER_API_CHANNEL_PATH is deprecated and scheduled for complete removal.") - } - if deprecatedFlags.Config != "" { logger.WarnContext(ctx, "[Deprecated] flag --config is deprecated for passing prometheus config. The flag will be used for passing the entire SigNoz config. More details can be found at https://github.com/SigNoz/signoz/issues/6805.") } diff --git a/pkg/signoz/provider.go b/pkg/signoz/provider.go index c307d8c050ab..878ec34a6c90 100644 --- a/pkg/signoz/provider.go +++ b/pkg/signoz/provider.go @@ -2,7 +2,6 @@ package signoz import ( "github.com/SigNoz/signoz/pkg/alertmanager" - "github.com/SigNoz/signoz/pkg/alertmanager/legacyalertmanager" "github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager" "github.com/SigNoz/signoz/pkg/analytics" "github.com/SigNoz/signoz/pkg/analytics/noopanalytics" @@ -156,7 +155,6 @@ func NewPrometheusProviderFactories(telemetryStore telemetrystore.TelemetryStore func NewAlertmanagerProviderFactories(sqlstore sqlstore.SQLStore, orgGetter organization.Getter) factory.NamedMap[factory.ProviderFactory[alertmanager.Alertmanager, alertmanager.Config]] { return factory.MustNewNamedMap( - legacyalertmanager.NewFactory(sqlstore, orgGetter), signozalertmanager.NewFactory(sqlstore, orgGetter), ) } diff --git a/pkg/types/alertmanagertypes/channel.go b/pkg/types/alertmanagertypes/channel.go index 1cb873ecb15d..bbb50f24adf8 100644 --- a/pkg/types/alertmanagertypes/channel.go +++ 
b/pkg/types/alertmanagertypes/channel.go @@ -177,26 +177,3 @@ func (c *Channel) Update(receiver Receiver) error { return nil } - -// This is needed by the legacy alertmanager to convert the MSTeamsV2Configs to MSTeamsConfigs -func (c *Channel) MSTeamsV2ToMSTeams() error { - if c.Type != "msteamsv2" { - return nil - } - - receiver, err := NewReceiver(c.Data) - if err != nil { - return err - } - - receiver = MSTeamsV2ReceiverToMSTeamsReceiver(receiver) - data, err := json.Marshal(receiver) - if err != nil { - return err - } - - c.Type = "msteams" - c.Data = string(data) - - return nil -} diff --git a/pkg/types/alertmanagertypes/receiver.go b/pkg/types/alertmanagertypes/receiver.go index bbdfaf28d7a8..83cae2931b8d 100644 --- a/pkg/types/alertmanagertypes/receiver.go +++ b/pkg/types/alertmanagertypes/receiver.go @@ -13,12 +13,12 @@ import ( "gopkg.in/yaml.v2" "github.com/prometheus/alertmanager/config" - "github.com/prometheus/alertmanager/config/receiver" ) type ( // Receiver is the type for the receiver configuration. - Receiver = config.Receiver + Receiver = config.Receiver + ReceiverIntegrationsFunc = func(nc Receiver, tmpl *template.Template, logger *slog.Logger) ([]notify.Integration, error) ) // Creates a new receiver from a string. The input is initialized with the default values from the upstream alertmanager. 
@@ -49,11 +49,7 @@ func NewReceiver(input string) (Receiver, error) { return receiverWithDefaults, nil } -func NewReceiverIntegrations(nc Receiver, tmpl *template.Template, logger *slog.Logger) ([]notify.Integration, error) { - return receiver.BuildReceiverIntegrations(nc, tmpl, logger) -} - -func TestReceiver(ctx context.Context, receiver Receiver, config *Config, tmpl *template.Template, logger *slog.Logger, alert *Alert) error { +func TestReceiver(ctx context.Context, receiver Receiver, receiverIntegrationsFunc ReceiverIntegrationsFunc, config *Config, tmpl *template.Template, logger *slog.Logger, alert *Alert) error { ctx = notify.WithGroupKey(ctx, fmt.Sprintf("%s-%s-%d", receiver.Name, alert.Labels.Fingerprint(), time.Now().Unix())) ctx = notify.WithGroupLabels(ctx, alert.Labels) ctx = notify.WithReceiverName(ctx, receiver.Name) @@ -75,7 +71,7 @@ func TestReceiver(ctx context.Context, receiver Receiver, config *Config, tmpl * return err } - integrations, err := NewReceiverIntegrations(receiver, tmpl, logger) + integrations, err := receiverIntegrationsFunc(receiver, tmpl, logger) if err != nil { return err } @@ -90,27 +86,3 @@ func TestReceiver(ctx context.Context, receiver Receiver, config *Config, tmpl * return nil } - -// This is needed by the legacy alertmanager to convert the MSTeamsV2Configs to MSTeamsConfigs -func MSTeamsV2ReceiverToMSTeamsReceiver(receiver Receiver) Receiver { - if receiver.MSTeamsV2Configs == nil { - return receiver - } - - var msTeamsConfigs []*config.MSTeamsConfig - for _, cfg := range receiver.MSTeamsV2Configs { - msTeamsConfigs = append(msTeamsConfigs, &config.MSTeamsConfig{ - NotifierConfig: cfg.NotifierConfig, - HTTPConfig: cfg.HTTPConfig, - WebhookURL: cfg.WebhookURL, - WebhookURLFile: cfg.WebhookURLFile, - Title: cfg.Title, - Text: cfg.Text, - }) - } - - receiver.MSTeamsV2Configs = nil - receiver.MSTeamsConfigs = msTeamsConfigs - - return receiver -} diff --git a/pkg/types/alertmanagertypes/template.go 
b/pkg/types/alertmanagertypes/template.go index 7de8a36acf64..8767eb7bd802 100644 --- a/pkg/types/alertmanagertypes/template.go +++ b/pkg/types/alertmanagertypes/template.go @@ -21,6 +21,7 @@ func FromGlobs(paths []string) (*alertmanagertemplate.Template, error) { {{ define "__ruleIdPath" }}{{ range .CommonLabels.SortedPairs }}{{ if eq .Name "ruleId" }}{{ if match "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" .Value }}/edit?ruleId={{ .Value | urlquery }}{{ end }}{{ end }}{{ end }}{{ end }} {{ define "__alertmanagerURL" }}{{ .ExternalURL }}/alerts{{ template "__ruleIdPath" . }}{{ end }} + {{ define "msteamsv2.default.titleLink" }}{{ template "__alertmanagerURL" . }}{{ end }} `))); err != nil { return nil, fmt.Errorf("error parsing alertmanager templates: %w", err) } From b1ea7eab708128f9faddb7c3f997facdfc55e03c Mon Sep 17 00:00:00 2001 From: Abhi kumar Date: Wed, 10 Sep 2025 19:53:06 +0530 Subject: [PATCH 12/51] chore: automatically show query addon when the value is present even after refresh (#9024) * chore: automatically show query addon when the value is present even after refresh * chore: minor cleanup * test: added tests for queryAddon * test: removed inputwithlabel mock --- .../InputWithLabel/InputWithLabel.tsx | 1 + .../QueryV2/QueryAddOns/QueryAddOns.tsx | 47 +++-- .../__tests__/QueryAddOns.test.tsx | 186 ++++++++++++++++++ 3 files changed, 220 insertions(+), 14 deletions(-) create mode 100644 frontend/src/components/QueryBuilderV2/__tests__/QueryAddOns.test.tsx diff --git a/frontend/src/components/InputWithLabel/InputWithLabel.tsx b/frontend/src/components/InputWithLabel/InputWithLabel.tsx index 0e089acea986..a95318fe9cdf 100644 --- a/frontend/src/components/InputWithLabel/InputWithLabel.tsx +++ b/frontend/src/components/InputWithLabel/InputWithLabel.tsx @@ -49,6 +49,7 @@ function InputWithLabel({ value={inputValue} onChange={handleChange} name={label.toLowerCase()} + data-testid={`input-${label}`} /> {labelAfter && {label}} {onClose && 
( diff --git a/frontend/src/components/QueryBuilderV2/QueryV2/QueryAddOns/QueryAddOns.tsx b/frontend/src/components/QueryBuilderV2/QueryV2/QueryAddOns/QueryAddOns.tsx index 997390f20989..f0fecfea2cbc 100644 --- a/frontend/src/components/QueryBuilderV2/QueryV2/QueryAddOns/QueryAddOns.tsx +++ b/frontend/src/components/QueryBuilderV2/QueryV2/QueryAddOns/QueryAddOns.tsx @@ -9,7 +9,7 @@ import { OrderByFilter } from 'container/QueryBuilder/filters/OrderByFilter/Orde import { ReduceToFilter } from 'container/QueryBuilder/filters/ReduceToFilter/ReduceToFilter'; import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations'; -import { isEmpty } from 'lodash-es'; +import { get, isEmpty } from 'lodash-es'; import { BarChart2, ChevronUp, ExternalLink, ScrollText } from 'lucide-react'; import { useCallback, useEffect, useState } from 'react'; import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData'; @@ -34,6 +34,14 @@ const ADD_ONS_KEYS = { LEGEND_FORMAT: 'legend_format', }; +const ADD_ONS_KEYS_TO_QUERY_PATH = { + [ADD_ONS_KEYS.GROUP_BY]: 'groupBy', + [ADD_ONS_KEYS.HAVING]: 'having.expression', + [ADD_ONS_KEYS.ORDER_BY]: 'orderBy', + [ADD_ONS_KEYS.LIMIT]: 'limit', + [ADD_ONS_KEYS.LEGEND_FORMAT]: 'legend', +}; + const ADD_ONS = [ { icon: , @@ -91,6 +99,9 @@ const REDUCE_TO = { 'https://signoz.io/docs/userguide/query-builder-v5/#reduce-operations', }; +const hasValue = (value: unknown): boolean => + value != null && value !== '' && !(Array.isArray(value) && value.length === 0); + // Custom tooltip content component function TooltipContent({ label, @@ -195,21 +206,29 @@ function QueryAddOns({ } } - // add reduce to if showReduceTo is true if (showReduceTo) { filteredAddOns = [...filteredAddOns, REDUCE_TO]; } - setAddOns(filteredAddOns); - // Filter selectedViews to only include add-ons present in filteredAddOns - setSelectedViews((prevSelectedViews) => - 
prevSelectedViews.filter((view) => - filteredAddOns.some((addOn) => addOn.key === view.key), + const activeAddOnKeys = new Set( + Object.entries(ADD_ONS_KEYS_TO_QUERY_PATH) + .filter(([, path]) => hasValue(get(query, path))) + .map(([key]) => key), + ); + + const availableAddOnKeys = new Set(filteredAddOns.map((addOn) => addOn.key)); + + // Filter and set selected views: add-ons that are both active and available + setSelectedViews( + ADD_ONS.filter( + (addOn) => + activeAddOnKeys.has(addOn.key) && availableAddOnKeys.has(addOn.key), ), ); + // eslint-disable-next-line react-hooks/exhaustive-deps - }, [panelType, isListViewPanel, query.dataSource]); + }, [panelType, isListViewPanel, query]); const handleOptionClick = (e: RadioChangeEvent): void => { if (selectedViews.find((view) => view.key === e.target.value.key)) { @@ -285,7 +304,7 @@ function QueryAddOns({ {selectedViews.length > 0 && (
{selectedViews.find((view) => view.key === 'group_by') && ( -
+
)} {selectedViews.find((view) => view.key === 'having') && ( -
+
)} {selectedViews.find((view) => view.key === 'limit') && ( -
+
)} {selectedViews.find((view) => view.key === 'order_by') && ( -
+
view.key === 'reduce_to') && showReduceTo && ( -
+
view.key === 'legend_format') && ( -
+
({ + useQueryOperations: () => ({ + handleChangeQueryData: mockHandleChangeQueryData, + }), +})); + +jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({ + useQueryBuilder: () => ({ + handleSetQueryData: mockHandleSetQueryData, + }), +})); + +jest.mock('container/QueryBuilder/filters/GroupByFilter/GroupByFilter', () => ({ + GroupByFilter: ({ onChange }: any) => ( + + ), +})); + +jest.mock('container/QueryBuilder/filters/OrderByFilter/OrderByFilter', () => ({ + OrderByFilter: ({ onChange }: any) => ( + + ), +})); + +jest.mock('../QueryV2/QueryAddOns/HavingFilter/HavingFilter', () => ({ + __esModule: true, + default: ({ onChange, onClose }: any) => ( +
+ + +
+ ), +})); + +jest.mock( + 'container/QueryBuilder/filters/ReduceToFilter/ReduceToFilter', + () => ({ + ReduceToFilter: ({ onChange }: any) => ( + + ), + }), +); + +function baseQuery(overrides: Partial = {}): any { + return { + dataSource: DataSource.TRACES, + aggregations: [{ id: 'a', operator: 'count' }], + groupBy: [], + orderBy: [], + legend: '', + limit: null, + having: { expression: '' }, + ...overrides, + }; +} + +describe('QueryAddOns', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('VALUE panel: no sections auto-open when query has no active add-ons', () => { + render( + , + ); + + expect(screen.queryByTestId('legend-format-content')).not.toBeInTheDocument(); + expect(screen.queryByTestId('reduce-to-content')).not.toBeInTheDocument(); + expect(screen.queryByTestId('order-by-content')).not.toBeInTheDocument(); + expect(screen.queryByTestId('limit-content')).not.toBeInTheDocument(); + expect(screen.queryByTestId('group-by-content')).not.toBeInTheDocument(); + expect(screen.queryByTestId('having-content')).not.toBeInTheDocument(); + }); + + it('hides group-by section for METRICS even if groupBy is set in query', () => { + render( + , + ); + + expect(screen.queryByTestId('group-by-content')).not.toBeInTheDocument(); + }); + + it('defaults to Order By open in list view panel', () => { + render( + , + ); + + expect(screen.getByTestId('order-by-content')).toBeInTheDocument(); + }); + + it('limit input auto-opens when limit is set and changing it calls handler', () => { + render( + , + ); + + const input = screen.getByTestId('input-Limit') as HTMLInputElement; + expect(screen.getByTestId('limit-content')).toBeInTheDocument(); + expect(input.value).toBe('5'); + + fireEvent.change(input, { target: { value: '10' } }); + expect(mockHandleChangeQueryData).toHaveBeenCalledWith('limit', 10); + }); + + it('auto-opens Order By and Limit when present in query', () => { + const query = baseQuery({ + orderBy: [{ columnName: 'duration', order: 'desc' }], + 
limit: 7, + }); + render( + , + ); + + expect(screen.getByTestId('order-by-content')).toBeInTheDocument(); + const limitInput = screen.getByTestId('input-Limit') as HTMLInputElement; + expect(screen.getByTestId('limit-content')).toBeInTheDocument(); + expect(limitInput.value).toBe('7'); + }); +}); From 0658c561b971e17c3b3e2229dbbb2a3cefc84212 Mon Sep 17 00:00:00 2001 From: Amlan Kumar Nandy <45410599+amlannandy@users.noreply.github.com> Date: Wed, 10 Sep 2025 21:43:54 +0700 Subject: [PATCH 13/51] chore: add query section to create alerts (#8991) --- .../__tests__/CreateAlertHeader.test.tsx | 21 ++ .../CreateAlertV2/CreateAlertV2.styles.scss | 14 + .../container/CreateAlertV2/CreateAlertV2.tsx | 22 +- .../ChartPreview/ChartPreview.tsx | 73 +++++ .../QuerySection/ChartPreview/index.ts | 3 + .../QuerySection/QuerySection.tsx | 82 +++++ .../__tests__/ChartPreview.test.tsx | 262 +++++++++++++++ .../__tests__/QuerySection.test.tsx | 307 ++++++++++++++++++ .../CreateAlertV2/QuerySection/index.ts | 3 + .../CreateAlertV2/QuerySection/styles.scss | 90 +++++ .../CreateAlertV2/Stepper/styles.scss | 2 +- .../container/CreateAlertV2/context/index.tsx | 53 ++- .../container/CreateAlertV2/context/types.ts | 8 +- .../container/CreateAlertV2/context/utils.tsx | 55 ++++ .../FormAlertRules/ChartPreview/index.tsx | 18 +- .../container/FormAlertRules/QuerySection.tsx | 10 +- frontend/src/pages/CreateAlert/index.tsx | 4 +- 17 files changed, 1007 insertions(+), 20 deletions(-) create mode 100644 frontend/src/container/CreateAlertV2/QuerySection/ChartPreview/ChartPreview.tsx create mode 100644 frontend/src/container/CreateAlertV2/QuerySection/ChartPreview/index.ts create mode 100644 frontend/src/container/CreateAlertV2/QuerySection/QuerySection.tsx create mode 100644 frontend/src/container/CreateAlertV2/QuerySection/__tests__/ChartPreview.test.tsx create mode 100644 frontend/src/container/CreateAlertV2/QuerySection/__tests__/QuerySection.test.tsx create mode 100644 
frontend/src/container/CreateAlertV2/QuerySection/index.ts create mode 100644 frontend/src/container/CreateAlertV2/QuerySection/styles.scss diff --git a/frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/CreateAlertHeader.test.tsx b/frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/CreateAlertHeader.test.tsx index 978c359c2932..adb4e8ed8b97 100644 --- a/frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/CreateAlertHeader.test.tsx +++ b/frontend/src/container/CreateAlertV2/CreateAlertHeader/__tests__/CreateAlertHeader.test.tsx @@ -4,6 +4,27 @@ import { fireEvent, render, screen } from '@testing-library/react'; import { CreateAlertProvider } from '../../context'; import CreateAlertHeader from '../CreateAlertHeader'; +jest.mock('uplot', () => { + const paths = { + spline: jest.fn(), + bars: jest.fn(), + }; + const uplotMock = jest.fn(() => ({ + paths, + })); + return { + paths, + default: uplotMock, + }; +}); + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useLocation: (): { search: string } => ({ + search: '', + }), +})); + const renderCreateAlertHeader = (): ReturnType => render( diff --git a/frontend/src/container/CreateAlertV2/CreateAlertV2.styles.scss b/frontend/src/container/CreateAlertV2/CreateAlertV2.styles.scss index 916a70f958d0..23c38b075b8b 100644 --- a/frontend/src/container/CreateAlertV2/CreateAlertV2.styles.scss +++ b/frontend/src/container/CreateAlertV2/CreateAlertV2.styles.scss @@ -1,3 +1,17 @@ +$top-nav-background-1: #0f0f0f; +$top-nav-background-2: #101010; + .create-alert-v2-container { background-color: var(--bg-ink-500); } + +.top-nav-container { + background: repeating-linear-gradient( + -45deg, + $top-nav-background-1, + $top-nav-background-1 10px, + $top-nav-background-2 10px, + $top-nav-background-2 20px + ); + margin-bottom: 0; +} diff --git a/frontend/src/container/CreateAlertV2/CreateAlertV2.tsx b/frontend/src/container/CreateAlertV2/CreateAlertV2.tsx index 
b896f8d10de6..bdfededd130a 100644 --- a/frontend/src/container/CreateAlertV2/CreateAlertV2.tsx +++ b/frontend/src/container/CreateAlertV2/CreateAlertV2.tsx @@ -1,16 +1,32 @@ import './CreateAlertV2.styles.scss'; -import { CreateAlertProvider } from './context'; -import CreateAlertHeader from './CreateAlertHeader/CreateAlertHeader'; +import { initialQueriesMap } from 'constants/queryBuilder'; +import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; + +import { CreateAlertProvider } from './context'; +import CreateAlertHeader from './CreateAlertHeader'; +import QuerySection from './QuerySection'; + +function CreateAlertV2({ + initialQuery = initialQueriesMap.metrics, +}: { + initialQuery?: Query; +}): JSX.Element { + useShareBuilderUrl({ defaultValue: initialQuery }); -function CreateAlertV2(): JSX.Element { return (
+
); } +CreateAlertV2.defaultProps = { + initialQuery: initialQueriesMap.metrics, +}; + export default CreateAlertV2; diff --git a/frontend/src/container/CreateAlertV2/QuerySection/ChartPreview/ChartPreview.tsx b/frontend/src/container/CreateAlertV2/QuerySection/ChartPreview/ChartPreview.tsx new file mode 100644 index 000000000000..ae5876bc6afa --- /dev/null +++ b/frontend/src/container/CreateAlertV2/QuerySection/ChartPreview/ChartPreview.tsx @@ -0,0 +1,73 @@ +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { useCreateAlertState } from 'container/CreateAlertV2/context'; +import ChartPreviewComponent from 'container/FormAlertRules/ChartPreview'; +import PlotTag from 'container/NewWidget/LeftContainer/WidgetGraph/PlotTag'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { useState } from 'react'; +import { useSelector } from 'react-redux'; +import { AppState } from 'store/reducers'; +import { EQueryType } from 'types/common/dashboard'; +import { GlobalReducer } from 'types/reducer/globalTime'; + +function ChartPreview(): JSX.Element { + const { currentQuery, panelType, stagedQuery } = useQueryBuilder(); + const { selectedTime: globalSelectedInterval } = useSelector< + AppState, + GlobalReducer + >((state) => state.globalTime); + const [, setQueryStatus] = useState(''); + const { alertDef } = useCreateAlertState(); + + const yAxisUnit = currentQuery.unit || ''; + + const renderQBChartPreview = (): JSX.Element => ( + + } + name="" + query={stagedQuery} + selectedInterval={globalSelectedInterval} + alertDef={alertDef} + yAxisUnit={yAxisUnit || ''} + graphType={panelType || PANEL_TYPES.TIME_SERIES} + setQueryStatus={setQueryStatus} + showSideLegend + /> + ); + + const renderPromAndChQueryChartPreview = (): JSX.Element => ( + + } + name="Chart Preview" + query={stagedQuery} + alertDef={alertDef} + selectedInterval={globalSelectedInterval} + yAxisUnit={yAxisUnit || ''} + graphType={panelType || PANEL_TYPES.TIME_SERIES} + 
setQueryStatus={setQueryStatus} + showSideLegend + /> + ); + + return ( +
+ {currentQuery.queryType === EQueryType.QUERY_BUILDER && + renderQBChartPreview()} + {currentQuery.queryType === EQueryType.PROM && + renderPromAndChQueryChartPreview()} + {currentQuery.queryType === EQueryType.CLICKHOUSE && + renderPromAndChQueryChartPreview()} +
+ ); +} + +export default ChartPreview; diff --git a/frontend/src/container/CreateAlertV2/QuerySection/ChartPreview/index.ts b/frontend/src/container/CreateAlertV2/QuerySection/ChartPreview/index.ts new file mode 100644 index 000000000000..8845923e0588 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/QuerySection/ChartPreview/index.ts @@ -0,0 +1,3 @@ +import ChartPreview from './ChartPreview'; + +export default ChartPreview; diff --git a/frontend/src/container/CreateAlertV2/QuerySection/QuerySection.tsx b/frontend/src/container/CreateAlertV2/QuerySection/QuerySection.tsx new file mode 100644 index 000000000000..644be2153d67 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/QuerySection/QuerySection.tsx @@ -0,0 +1,82 @@ +import './styles.scss'; + +import { Button } from 'antd'; +import classNames from 'classnames'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import QuerySectionComponent from 'container/FormAlertRules/QuerySection'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { BarChart2, DraftingCompass, FileText, ScrollText } from 'lucide-react'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; + +import { useCreateAlertState } from '../context'; +import Stepper from '../Stepper'; +import ChartPreview from './ChartPreview'; + +function QuerySection(): JSX.Element { + const { currentQuery, handleRunQuery } = useQueryBuilder(); + const { alertType, setAlertType, alertDef } = useCreateAlertState(); + + const tabs = [ + { + label: 'Metrics', + icon: , + value: AlertTypes.METRICS_BASED_ALERT, + }, + { + label: 'Logs', + icon: , + value: AlertTypes.LOGS_BASED_ALERT, + }, + { + label: 'Traces', + icon: , + value: AlertTypes.TRACES_BASED_ALERT, + }, + { + label: 'Exceptions', + icon: , + value: AlertTypes.EXCEPTIONS_BASED_ALERT, + }, + ]; + + return ( +
+ + +
+
+ {tabs.map((tab) => ( + + ))} +
+
+ {}} + alertType={alertType} + runQuery={handleRunQuery} + alertDef={alertDef} + panelType={PANEL_TYPES.TIME_SERIES} + key={currentQuery.queryType} + ruleId="" + hideTitle + /> +
+ ); +} + +export default QuerySection; diff --git a/frontend/src/container/CreateAlertV2/QuerySection/__tests__/ChartPreview.test.tsx b/frontend/src/container/CreateAlertV2/QuerySection/__tests__/ChartPreview.test.tsx new file mode 100644 index 000000000000..f2c560c75482 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/QuerySection/__tests__/ChartPreview.test.tsx @@ -0,0 +1,262 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +/* eslint-disable react/destructuring-assignment */ +import { render, screen } from '@testing-library/react'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { QueryClient, QueryClientProvider } from 'react-query'; +import { Provider } from 'react-redux'; +import { MemoryRouter } from 'react-router-dom'; +import store from 'store'; +import { EQueryType } from 'types/common/dashboard'; + +import { CreateAlertProvider } from '../../context'; +import ChartPreview from '../ChartPreview/ChartPreview'; + +// Constants for duplicate strings +const REQUESTS_PER_SEC = 'requests/sec'; +const CHART_PREVIEW_NAME = 'Chart Preview'; +const QUERY_TYPE_TEST_ID = 'query-type'; +const GRAPH_TYPE_TEST_ID = 'graph-type'; +const CHART_PREVIEW_COMPONENT_TEST_ID = 'chart-preview-component'; +const PLOT_QUERY_TYPE_TEST_ID = 'plot-query-type'; +const PLOT_PANEL_TYPE_TEST_ID = 'plot-panel-type'; + +jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({ + useQueryBuilder: jest.fn(), +})); +jest.mock( + 'container/FormAlertRules/ChartPreview', + () => + function MockChartPreviewComponent(props: any): JSX.Element { + return ( +
+
{props.headline}
+
{props.name}
+
{props.query?.queryType}
+
+ {props.selectedInterval?.startTime} +
+
{props.yAxisUnit}
+
{props.graphType}
+
+ ); + }, +); +jest.mock( + 'container/NewWidget/LeftContainer/WidgetGraph/PlotTag', + () => + function MockPlotTag(props: any): JSX.Element { + return ( +
+
{props.queryType}
+
{props.panelType}
+
+ ); + }, +); +jest.mock('uplot', () => { + const paths = { + spline: jest.fn(), + bars: jest.fn(), + }; + const uplotMock = jest.fn(() => ({ + paths, + })); + return { + paths, + default: uplotMock, + }; +}); + +// Mock react-redux +jest.mock('react-redux', () => ({ + ...jest.requireActual('react-redux'), + useSelector: (): any => ({ + globalTime: { + selectedTime: { + startTime: 1713734400000, + endTime: 1713738000000, + }, + maxTime: 1713738000000, + minTime: 1713734400000, + }, + }), +})); + +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false, + }, + }, +}); + +const mockUseQueryBuilder = { + currentQuery: { + queryType: EQueryType.QUERY_BUILDER, + unit: REQUESTS_PER_SEC, + builder: { + queryData: [ + { + dataSource: 'metrics', + }, + ], + }, + }, + panelType: PANEL_TYPES.TIME_SERIES, + stagedQuery: { + queryType: EQueryType.QUERY_BUILDER, + unit: REQUESTS_PER_SEC, + }, +}; + +const renderChartPreview = (): ReturnType => + render( + + + + + + + + + , + ); + +describe('ChartPreview', () => { + const { useQueryBuilder } = jest.requireMock( + 'hooks/queryBuilder/useQueryBuilder', + ); + + beforeEach(() => { + jest.clearAllMocks(); + useQueryBuilder.mockReturnValue(mockUseQueryBuilder); + }); + + it('renders the component with correct container class', () => { + renderChartPreview(); + + const container = screen + .getByTestId(CHART_PREVIEW_COMPONENT_TEST_ID) + .closest('.chart-preview-container'); + expect(container).toBeInTheDocument(); + }); + + it('renders QueryBuilder chart preview when query type is QUERY_BUILDER', () => { + renderChartPreview(); + + expect( + screen.getByTestId(CHART_PREVIEW_COMPONENT_TEST_ID), + ).toBeInTheDocument(); + expect(screen.getByTestId('plot-tag')).toBeInTheDocument(); + expect(screen.getByTestId(PLOT_QUERY_TYPE_TEST_ID)).toHaveTextContent( + EQueryType.QUERY_BUILDER, + ); + expect(screen.getByTestId(PLOT_PANEL_TYPE_TEST_ID)).toHaveTextContent( + PANEL_TYPES.TIME_SERIES, + ); + }); + + 
it('renders QueryBuilder chart preview with empty name when query type is QUERY_BUILDER', () => { + renderChartPreview(); + + expect(screen.getByTestId('name')).toHaveTextContent(''); + }); + + it('renders QueryBuilder chart preview with correct props', () => { + renderChartPreview(); + + expect(screen.getByTestId(QUERY_TYPE_TEST_ID)).toHaveTextContent( + EQueryType.QUERY_BUILDER, + ); + expect(screen.getByTestId('y-axis-unit')).toHaveTextContent(REQUESTS_PER_SEC); + expect(screen.getByTestId(GRAPH_TYPE_TEST_ID)).toHaveTextContent( + PANEL_TYPES.TIME_SERIES, + ); + expect(screen.getByTestId('name')).toHaveTextContent(''); + expect(screen.getByTestId('headline')).toBeInTheDocument(); + expect(screen.getByTestId('selected-interval')).toBeInTheDocument(); + }); + + it('renders PromQL chart preview when query type is PROM', () => { + useQueryBuilder.mockReturnValue({ + ...mockUseQueryBuilder, + currentQuery: { + ...mockUseQueryBuilder.currentQuery, + queryType: EQueryType.PROM, + }, + stagedQuery: { + queryType: EQueryType.PROM, + unit: REQUESTS_PER_SEC, + }, + }); + + renderChartPreview(); + + expect( + screen.getByTestId(CHART_PREVIEW_COMPONENT_TEST_ID), + ).toBeInTheDocument(); + expect(screen.getByTestId('name')).toHaveTextContent(CHART_PREVIEW_NAME); + expect(screen.getByTestId(QUERY_TYPE_TEST_ID)).toHaveTextContent( + EQueryType.PROM, + ); + }); + + it('renders ClickHouse chart preview when query type is CLICKHOUSE', () => { + useQueryBuilder.mockReturnValue({ + ...mockUseQueryBuilder, + currentQuery: { + ...mockUseQueryBuilder.currentQuery, + queryType: EQueryType.CLICKHOUSE, + }, + stagedQuery: { + queryType: EQueryType.CLICKHOUSE, + unit: REQUESTS_PER_SEC, + }, + }); + + renderChartPreview(); + + expect( + screen.getByTestId(CHART_PREVIEW_COMPONENT_TEST_ID), + ).toBeInTheDocument(); + expect(screen.getByTestId('name')).toHaveTextContent(CHART_PREVIEW_NAME); + expect(screen.getByTestId(QUERY_TYPE_TEST_ID)).toHaveTextContent( + EQueryType.CLICKHOUSE, + ); + }); 
+ + it('uses default panel type when panelType is not provided', () => { + useQueryBuilder.mockReturnValue({ + ...mockUseQueryBuilder, + panelType: undefined, + }); + + renderChartPreview(); + + expect(screen.getByTestId(PLOT_PANEL_TYPE_TEST_ID)).toHaveTextContent( + PANEL_TYPES.TIME_SERIES, + ); + expect(screen.getByTestId(GRAPH_TYPE_TEST_ID)).toHaveTextContent( + PANEL_TYPES.TIME_SERIES, + ); + expect(screen.getByTestId(QUERY_TYPE_TEST_ID)).toHaveTextContent( + EQueryType.QUERY_BUILDER, + ); + }); + + it('uses custom panel type when provided', () => { + useQueryBuilder.mockReturnValue({ + ...mockUseQueryBuilder, + panelType: PANEL_TYPES.BAR, + }); + + renderChartPreview(); + + expect(screen.getByTestId(PLOT_PANEL_TYPE_TEST_ID)).toHaveTextContent( + PANEL_TYPES.BAR, + ); + expect(screen.getByTestId(GRAPH_TYPE_TEST_ID)).toHaveTextContent( + PANEL_TYPES.BAR, + ); + }); +}); diff --git a/frontend/src/container/CreateAlertV2/QuerySection/__tests__/QuerySection.test.tsx b/frontend/src/container/CreateAlertV2/QuerySection/__tests__/QuerySection.test.tsx new file mode 100644 index 000000000000..7f341c6956ca --- /dev/null +++ b/frontend/src/container/CreateAlertV2/QuerySection/__tests__/QuerySection.test.tsx @@ -0,0 +1,307 @@ +/* eslint-disable @typescript-eslint/no-explicit-any */ +import { render, screen } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; +import { QueryParams } from 'constants/query'; +import { QueryClient, QueryClientProvider } from 'react-query'; +import { Provider } from 'react-redux'; +import { MemoryRouter } from 'react-router-dom'; +import store from 'store'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; + +import { CreateAlertProvider } from '../../context'; +import QuerySection from '../QuerySection'; + +jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({ + useQueryBuilder: jest.fn(), +})); +jest.mock('uplot', () => { + const paths = { + spline: jest.fn(), + bars: jest.fn(), + }; + const 
uplotMock = jest.fn(() => ({ + paths, + })); + return { + paths, + default: uplotMock, + }; +}); +jest.mock('react-redux', () => ({ + ...jest.requireActual('react-redux'), + useSelector: (): any => ({ + globalTime: { + selectedTime: { + startTime: 1713734400000, + endTime: 1713738000000, + }, + maxTime: 1713738000000, + minTime: 1713734400000, + }, + }), +})); +jest.mock( + 'container/FormAlertRules/QuerySection', + () => + function MockQuerySectionComponent({ + queryCategory, + alertType, + panelType, + }: any): JSX.Element { + return ( +
+
{queryCategory}
+
{alertType}
+
{panelType}
+
+ ); + }, +); +jest.mock( + '../ChartPreview', + () => + function MockChartPreview(): JSX.Element { + return
Chart Preview
; + }, +); +jest.mock( + '../../Stepper', + () => + function MockStepper({ stepNumber, label }: any): JSX.Element { + return ( +
+
{stepNumber}
+
{label}
+
+ ); + }, +); + +const mockUseQueryBuilder = { + currentQuery: { + queryType: 'query_builder', + unit: 'requests/sec', + builder: { + queryData: [ + { + dataSource: 'metrics', + }, + ], + }, + }, + handleRunQuery: jest.fn(), + redirectWithQueryBuilderData: jest.fn(), +}; +const queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false, + }, + }, +}); +const renderQuerySection = (): ReturnType => + render( + + + + + + + + + , + ); + +const METRICS_TEXT = 'Metrics'; +const QUERY_BUILDER_TEXT = 'query_builder'; +const LOGS_TEXT = 'Logs'; +const TRACES_TEXT = 'Traces'; +const ACTIVE_TAB_CLASS = 'active-tab'; + +describe('QuerySection', () => { + const { useQueryBuilder } = jest.requireMock( + 'hooks/queryBuilder/useQueryBuilder', + ); + + beforeEach(() => { + jest.clearAllMocks(); + useQueryBuilder.mockReturnValue(mockUseQueryBuilder); + }); + + it('renders the component with all required elements', () => { + renderQuerySection(); + + // Check if Stepper is rendered + expect(screen.getByTestId('stepper')).toBeInTheDocument(); + expect(screen.getByTestId('step-number')).toHaveTextContent('1'); + expect(screen.getByTestId('step-label')).toHaveTextContent( + 'Define the query you want to set an alert on', + ); + + // Check if ChartPreview is rendered + expect(screen.getByTestId('chart-preview')).toBeInTheDocument(); + + // Check if QuerySectionComponent is rendered + expect(screen.getByTestId('query-section-component')).toBeInTheDocument(); + expect(screen.getByTestId('query-category')).toHaveTextContent( + QUERY_BUILDER_TEXT, + ); + expect(screen.getByTestId('alert-type')).toHaveTextContent( + AlertTypes.METRICS_BASED_ALERT, + ); + expect(screen.getByTestId('panel-type')).toHaveTextContent('graph'); + }); + + it('renders all three alert type tabs', () => { + renderQuerySection(); + + // Check if all tabs are rendered + expect(screen.getByText(METRICS_TEXT)).toBeInTheDocument(); + expect(screen.getByText('Logs')).toBeInTheDocument(); + 
expect(screen.getByText('Traces')).toBeInTheDocument(); + + // Check if icons are rendered + expect(screen.getByTestId('metrics-view')).toBeInTheDocument(); + expect(screen.getByTestId('logs-view')).toBeInTheDocument(); + expect(screen.getByTestId('traces-view')).toBeInTheDocument(); + }); + + it('shows Metrics tab as active by default', () => { + renderQuerySection(); + + const metricsTab = screen.getByText(METRICS_TEXT).closest('button'); + expect(metricsTab).toHaveClass(ACTIVE_TAB_CLASS); + }); + + it('handles alert type change when clicking on different tabs', async () => { + const user = userEvent.setup(); + renderQuerySection(); + + // Click on Logs tab + const logsTab = screen.getByText(LOGS_TEXT); + await user.click(logsTab); + + // Verify that redirectWithQueryBuilderData was called with correct data + expect(mockUseQueryBuilder.redirectWithQueryBuilderData).toHaveBeenCalledWith( + expect.any(Object), + { + [QueryParams.alertType]: AlertTypes.LOGS_BASED_ALERT, + }, + undefined, + true, + ); + + // Click on Traces tab + const tracesTab = screen.getByText(TRACES_TEXT); + await user.click(tracesTab); + + // Verify that redirectWithQueryBuilderData was called with correct data + expect(mockUseQueryBuilder.redirectWithQueryBuilderData).toHaveBeenCalledWith( + expect.any(Object), + { + [QueryParams.alertType]: AlertTypes.TRACES_BASED_ALERT, + }, + undefined, + true, + ); + }); + + it('updates active tab when alert type changes', async () => { + const user = userEvent.setup(); + renderQuerySection(); + + // Initially Metrics should be active + const metricsTab = screen.getByText(METRICS_TEXT).closest('button'); + expect(metricsTab).toHaveClass(ACTIVE_TAB_CLASS); + + // Click on Logs tab + const logsTab = screen.getByText(LOGS_TEXT); + await user.click(logsTab); + + // Logs should now be active + const logsButton = logsTab.closest('button'); + expect(logsButton).toHaveClass(ACTIVE_TAB_CLASS); + expect(metricsTab).not.toHaveClass(ACTIVE_TAB_CLASS); + }); + + 
it('passes correct props to QuerySectionComponent', () => { + renderQuerySection(); + + // Check if the component receives the correct props + expect(screen.getByTestId('query-category')).toHaveTextContent( + QUERY_BUILDER_TEXT, + ); + expect(screen.getByTestId('alert-type')).toHaveTextContent( + AlertTypes.METRICS_BASED_ALERT, + ); + expect(screen.getByTestId('panel-type')).toHaveTextContent('graph'); + }); + + it('has correct CSS classes for tab styling', () => { + renderQuerySection(); + + const tabs = screen.getAllByRole('button'); + + tabs.forEach((tab) => { + expect(tab).toHaveClass('list-view-tab'); + expect(tab).toHaveClass('explorer-view-option'); + }); + }); + + it('renders with correct container structure', () => { + renderQuerySection(); + + const container = screen.getByText(METRICS_TEXT).closest('.query-section'); + expect(container).toBeInTheDocument(); + + const tabsContainer = screen + .getByText(METRICS_TEXT) + .closest('.query-section-tabs'); + expect(tabsContainer).toBeInTheDocument(); + + const actionsContainer = screen + .getByText(METRICS_TEXT) + .closest('.query-section-query-actions'); + expect(actionsContainer).toBeInTheDocument(); + }); + + it('handles multiple rapid tab clicks correctly', async () => { + const user = userEvent.setup(); + renderQuerySection(); + + const logsTab = screen.getByText('Logs'); + const tracesTab = screen.getByText('Traces'); + + // Rapidly click on different tabs + await user.click(logsTab); + await user.click(tracesTab); + await user.click(logsTab); + + // Should have called redirectWithQueryBuilderData 3 times + expect( + mockUseQueryBuilder.redirectWithQueryBuilderData, + ).toHaveBeenCalledTimes(3); + }); + + it('maintains tab state correctly after interactions', async () => { + const user = userEvent.setup(); + renderQuerySection(); + + // Click on Logs tab + const logsTab = screen.getByText('Logs'); + await user.click(logsTab); + + // Verify Logs is active + const logsButton = logsTab.closest('button'); + 
expect(logsButton).toHaveClass(ACTIVE_TAB_CLASS); + + // Click back to Metrics + const metricsTab = screen.getByText(METRICS_TEXT); + await user.click(metricsTab); + + // Verify Metrics is active again + const metricsButton = metricsTab.closest('button'); + expect(metricsButton).toHaveClass(ACTIVE_TAB_CLASS); + expect(logsButton).not.toHaveClass(ACTIVE_TAB_CLASS); + }); +}); diff --git a/frontend/src/container/CreateAlertV2/QuerySection/index.ts b/frontend/src/container/CreateAlertV2/QuerySection/index.ts new file mode 100644 index 000000000000..b3ff502ac5fd --- /dev/null +++ b/frontend/src/container/CreateAlertV2/QuerySection/index.ts @@ -0,0 +1,3 @@ +import QuerySection from './QuerySection'; + +export default QuerySection; diff --git a/frontend/src/container/CreateAlertV2/QuerySection/styles.scss b/frontend/src/container/CreateAlertV2/QuerySection/styles.scss new file mode 100644 index 000000000000..3faed92151b0 --- /dev/null +++ b/frontend/src/container/CreateAlertV2/QuerySection/styles.scss @@ -0,0 +1,90 @@ +.query-section { + margin: 0 16px; + .query-section-tabs { + display: flex; + align-items: center; + margin-left: 12px; + margin-top: 24px; + + .query-section-query-actions { + display: flex; + border-radius: 2px; + border: 1px solid var(--bg-slate-400); + background: var(--bg-ink-300); + flex-direction: row; + border-bottom: none; + margin-bottom: -1px; + + .prom-ql-icon { + height: 14px; + width: 14px; + } + + .explorer-view-option { + display: flex; + align-items: center; + justify-content: center; + flex-direction: row; + border: none; + padding: 9px; + box-shadow: none; + border-radius: 0px; + border-left: 0.5px solid var(--bg-slate-400); + border-bottom: 0.5px solid var(--bg-slate-400); + width: 120px; + height: 36px; + + gap: 8px; + + &.active-tab { + background-color: var(--bg-ink-500); + border-bottom: none; + + &:hover { + background-color: var(--bg-ink-500) !important; + } + } + + &:disabled { + background-color: var(--bg-ink-300); + opacity: 
0.6; + } + + &:first-child { + border-left: 1px solid transparent; + } + + &:hover { + background-color: transparent !important; + border-left: 1px solid transparent !important; + color: var(--bg-vanilla-100); + } + } + } + + .frequency-chart-view-controller { + display: flex; + align-items: center; + padding-left: 8px; + gap: 8px; + } + } + + .chart-preview-container { + margin-right: 4px; + .alert-chart-container { + .ant-card { + border: 1px solid var(--bg-slate-500); + .ant-card-body { + background-color: var(--bg-ink-500); + } + } + } + } + + .alert-query-section-container { + margin: 0; + background-color: var(--bg-ink-500); + border: 1px solid var(--bg-slate-400); + } +} diff --git a/frontend/src/container/CreateAlertV2/Stepper/styles.scss b/frontend/src/container/CreateAlertV2/Stepper/styles.scss index db56be0695a1..ed0ee65a2881 100644 --- a/frontend/src/container/CreateAlertV2/Stepper/styles.scss +++ b/frontend/src/container/CreateAlertV2/Stepper/styles.scss @@ -3,7 +3,7 @@ align-items: center; gap: 16px; margin-bottom: 16px; - padding: 16px; + padding: 16px 0; border-radius: 8px; } diff --git a/frontend/src/container/CreateAlertV2/context/index.tsx b/frontend/src/container/CreateAlertV2/context/index.tsx index 53839bbc0727..4f1d4d50d735 100644 --- a/frontend/src/container/CreateAlertV2/context/index.tsx +++ b/frontend/src/container/CreateAlertV2/context/index.tsx @@ -1,18 +1,25 @@ +import { QueryParams } from 'constants/query'; +import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder'; +import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi'; import { createContext, + useCallback, useContext, useMemo, useReducer, useState, } from 'react'; +import { useLocation } from 'react-router-dom'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; +import { AlertDef } from 'types/api/alerts/def'; import { INITIAL_ALERT_STATE } from './constants'; +import { ICreateAlertContextProps, 
ICreateAlertProviderProps } from './types'; import { - AlertCreationStep, - ICreateAlertContextProps, - ICreateAlertProviderProps, -} from './types'; -import { alertCreationReducer } from './utils'; + alertCreationReducer, + buildInitialAlertDef, + getInitialAlertTypeFromURL, +} from './utils'; const CreateAlertContext = createContext(null); @@ -36,18 +43,44 @@ export function CreateAlertProvider( alertCreationReducer, INITIAL_ALERT_STATE, ); - const [step, setStep] = useState( - AlertCreationStep.ALERT_DEFINITION, + + const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder(); + const location = useLocation(); + const queryParams = new URLSearchParams(location.search); + + const [alertType, setAlertType] = useState(() => + getInitialAlertTypeFromURL(queryParams, currentQuery), + ); + const [alertDef] = useState(buildInitialAlertDef(alertType)); + + const handleAlertTypeChange = useCallback( + (value: AlertTypes): void => { + const queryToRedirect = buildInitialAlertDef(value); + const currentQueryToRedirect = mapQueryDataFromApi( + queryToRedirect.condition.compositeQuery, + ); + redirectWithQueryBuilderData( + currentQueryToRedirect, + { + [QueryParams.alertType]: value, + }, + undefined, + true, + ); + setAlertType(value); + }, + [redirectWithQueryBuilderData], ); const contextValue: ICreateAlertContextProps = useMemo( () => ({ alertState, setAlertState, - step, - setStep, + alertType, + setAlertType: handleAlertTypeChange, + alertDef, }), - [alertState, setAlertState, step, setStep], + [alertState, alertType, handleAlertTypeChange, alertDef], ); return ( diff --git a/frontend/src/container/CreateAlertV2/context/types.ts b/frontend/src/container/CreateAlertV2/context/types.ts index be0f8e53ca2d..ee90b03b8827 100644 --- a/frontend/src/container/CreateAlertV2/context/types.ts +++ b/frontend/src/container/CreateAlertV2/context/types.ts @@ -1,11 +1,13 @@ import { Dispatch } from 'react'; -import { Labels } from 'types/api/alerts/def'; +import { 
AlertTypes } from 'types/api/alerts/alertTypes'; +import { AlertDef, Labels } from 'types/api/alerts/def'; export interface ICreateAlertContextProps { alertState: AlertState; setAlertState: Dispatch; - step: AlertCreationStep; - setStep: Dispatch; + alertType: AlertTypes; + setAlertType: Dispatch; + alertDef: AlertDef; } export interface ICreateAlertProviderProps { diff --git a/frontend/src/container/CreateAlertV2/context/utils.tsx b/frontend/src/container/CreateAlertV2/context/utils.tsx index d04a8d1b49d2..9c427a6e9aed 100644 --- a/frontend/src/container/CreateAlertV2/context/utils.tsx +++ b/frontend/src/container/CreateAlertV2/context/utils.tsx @@ -1,3 +1,16 @@ +import { QueryParams } from 'constants/query'; +import { + alertDefaults, + anamolyAlertDefaults, + exceptionAlertDefaults, + logAlertDefaults, + traceAlertDefaults, +} from 'container/CreateAlertRule/defaults'; +import { AlertTypes } from 'types/api/alerts/alertTypes'; +import { AlertDef } from 'types/api/alerts/def'; +import { Query } from 'types/api/queryBuilder/queryBuilderData'; +import { DataSource } from 'types/common/queryBuilder'; + import { AlertState, CreateAlertAction } from './types'; export const alertCreationReducer = ( @@ -24,3 +37,45 @@ export const alertCreationReducer = ( return state; } }; + +export function getInitialAlertType(currentQuery: Query): AlertTypes { + const dataSource = + currentQuery.builder.queryData[0].dataSource || DataSource.METRICS; + switch (dataSource) { + case DataSource.METRICS: + return AlertTypes.METRICS_BASED_ALERT; + case DataSource.LOGS: + return AlertTypes.LOGS_BASED_ALERT; + case DataSource.TRACES: + return AlertTypes.TRACES_BASED_ALERT; + default: + return AlertTypes.METRICS_BASED_ALERT; + } +} + +export function buildInitialAlertDef(alertType: AlertTypes): AlertDef { + switch (alertType) { + case AlertTypes.LOGS_BASED_ALERT: + return logAlertDefaults; + case AlertTypes.TRACES_BASED_ALERT: + return traceAlertDefaults; + case 
AlertTypes.EXCEPTIONS_BASED_ALERT: + return exceptionAlertDefaults; + case AlertTypes.ANOMALY_BASED_ALERT: + return anamolyAlertDefaults; + case AlertTypes.METRICS_BASED_ALERT: + return alertDefaults; + default: + return alertDefaults; + } +} + +export function getInitialAlertTypeFromURL( + urlSearchParams: URLSearchParams, + currentQuery: Query, +): AlertTypes { + const alertTypeFromURL = urlSearchParams.get(QueryParams.alertType); + return alertTypeFromURL + ? (alertTypeFromURL as AlertTypes) + : getInitialAlertType(currentQuery); +} diff --git a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx index c6a4a7be6e50..4390da8f9f9a 100644 --- a/frontend/src/container/FormAlertRules/ChartPreview/index.tsx +++ b/frontend/src/container/FormAlertRules/ChartPreview/index.tsx @@ -65,6 +65,7 @@ export interface ChartPreviewProps { allowSelectedIntervalForStepGen?: boolean; yAxisUnit: string; setQueryStatus?: (status: string) => void; + showSideLegend?: boolean; } // eslint-disable-next-line sonarjs/cognitive-complexity @@ -80,6 +81,7 @@ function ChartPreview({ alertDef, yAxisUnit, setQueryStatus, + showSideLegend = false, }: ChartPreviewProps): JSX.Element | null { const { t } = useTranslation('alerts'); const dispatch = useDispatch(); @@ -236,6 +238,18 @@ function ChartPreview({ const { timezone } = useTimezone(); + const legendPosition = useMemo(() => { + if (!showSideLegend) { + return LegendPosition.BOTTOM; + } + const numberOfSeries = + queryResponse?.data?.payload?.data?.result?.length || 0; + if (numberOfSeries <= 1) { + return LegendPosition.BOTTOM; + } + return LegendPosition.RIGHT; + }, [queryResponse?.data?.payload?.data?.result?.length, showSideLegend]); + const options = useMemo( () => getUPlotChartOptions({ @@ -279,7 +293,7 @@ function ChartPreview({ graphsVisibilityStates: graphVisibility, setGraphsVisibilityStates: setGraphVisibility, enhancedLegend: true, - legendPosition: 
LegendPosition.BOTTOM, + legendPosition, }), [ yAxisUnit, @@ -298,6 +312,7 @@ function ChartPreview({ currentQuery, query, graphVisibility, + legendPosition, ], ); @@ -370,6 +385,7 @@ ChartPreview.defaultProps = { allowSelectedIntervalForStepGen: false, alertDef: undefined, setQueryStatus: (): void => {}, + showSideLegend: false, }; export default ChartPreview; diff --git a/frontend/src/container/FormAlertRules/QuerySection.tsx b/frontend/src/container/FormAlertRules/QuerySection.tsx index 5009c0121ae5..1238a8501551 100644 --- a/frontend/src/container/FormAlertRules/QuerySection.tsx +++ b/frontend/src/container/FormAlertRules/QuerySection.tsx @@ -31,6 +31,7 @@ function QuerySection({ alertDef, panelType, ruleId, + hideTitle, }: QuerySectionProps): JSX.Element { // init namespace for translations const { t } = useTranslation('alerts'); @@ -218,7 +219,9 @@ function QuerySection({ return ( <> - {t('alert_form_step2', { step: step2Label })} + {!hideTitle && ( + {t('alert_form_step2', { step: step2Label })} + )}
{renderTabs(alertType)}
{renderQuerySection(currentTab)} @@ -235,6 +238,11 @@ interface QuerySectionProps { alertDef: AlertDef; panelType: PANEL_TYPES; ruleId: string; + hideTitle?: boolean; } +QuerySection.defaultProps = { + hideTitle: false, +}; + export default QuerySection; diff --git a/frontend/src/pages/CreateAlert/index.tsx b/frontend/src/pages/CreateAlert/index.tsx index 9172adfefbff..b09a2503d2cc 100644 --- a/frontend/src/pages/CreateAlert/index.tsx +++ b/frontend/src/pages/CreateAlert/index.tsx @@ -1,6 +1,8 @@ import CreateAlertRule from 'container/CreateAlertRule'; -import CreateAlertV2 from 'container/CreateAlertV2'; import { showNewCreateAlertsPage } from 'container/CreateAlertV2/utils'; +import { lazy } from 'react'; + +const CreateAlertV2 = lazy(() => import('container/CreateAlertV2')); function CreateAlertPage(): JSX.Element { const showNewCreateAlertsPageFlag = showNewCreateAlertsPage(); From 9d999feabb2d4ea9dc73e4cb22322611a1228e3b Mon Sep 17 00:00:00 2001 From: SagarRajput-7 <162284829+SagarRajput-7@users.noreply.github.com> Date: Wed, 10 Sep 2025 20:26:40 +0530 Subject: [PATCH 14/51] feat: change Bar color opacity and make stacking as default (#9026) --- .../src/container/ApiMonitoring/utils.tsx | 1 - .../GridCard/WidgetGraphComponent.test.tsx | 1 - .../MeterExplorer/Breakdown/graphs.ts | 1 - .../MetricsApplication.factory.ts | 1 - .../NewWidget/RightContainer/index.tsx | 2 - .../NewWidget/__test__/NewWidget.test.tsx | 95 ++++++++++++++++++- frontend/src/container/NewWidget/index.tsx | 10 -- frontend/src/container/NewWidget/utils.ts | 2 +- .../__tests__/tablePanelWrapperHelper.ts | 1 - .../__tests__/valuePanelWrapperHelper.ts | 1 - .../container/QueryTable/__test__/mocks.ts | 1 - frontend/src/hooks/dashboard/utils.ts | 1 - .../src/lib/uPlotLib/utils/getSeriesData.ts | 2 +- .../utils/tests/getSeriesData.test.ts | 2 +- .../MQDetails/MetricPage/MetricPageUtil.ts | 1 - frontend/src/types/api/dashboard/getAll.ts | 1 - 16 files changed, 97 insertions(+), 26 deletions(-) diff 
--git a/frontend/src/container/ApiMonitoring/utils.tsx b/frontend/src/container/ApiMonitoring/utils.tsx index 6026ddbb8d9e..bb68d12248ee 100644 --- a/frontend/src/container/ApiMonitoring/utils.tsx +++ b/frontend/src/container/ApiMonitoring/utils.tsx @@ -3168,7 +3168,6 @@ export const getStatusCodeBarChartWidgetData = ( }, description: '', id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2', - isStacked: false, panelTypes: PANEL_TYPES.BAR, title: '', opacity: '', diff --git a/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.test.tsx b/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.test.tsx index 3de8d2fdf9e0..1f0522546bb5 100644 --- a/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.test.tsx +++ b/frontend/src/container/GridCardLayout/GridCard/WidgetGraphComponent.test.tsx @@ -53,7 +53,6 @@ const mockProps: WidgetGraphComponentProps = { description: '', fillSpans: false, id: '17f905f6-d355-46bd-a78e-cbc87e6f58cc', - isStacked: false, mergeAllActiveQueries: false, nullZeroValues: 'zero', opacity: '1', diff --git a/frontend/src/container/MeterExplorer/Breakdown/graphs.ts b/frontend/src/container/MeterExplorer/Breakdown/graphs.ts index a122bdcf7c0f..4948537f98b6 100644 --- a/frontend/src/container/MeterExplorer/Breakdown/graphs.ts +++ b/frontend/src/container/MeterExplorer/Breakdown/graphs.ts @@ -38,7 +38,6 @@ export const getWidgetQueryBuilder = ({ }: GetWidgetQueryPropsReturn): Widgets => ({ description: description || '', id: id || uuid(), - isStacked: false, nullZeroValues: nullZeroValues || '', opacity: '1', panelTypes, diff --git a/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts b/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts index 15eff76f8b7c..0157acfeac28 100644 --- a/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts +++ b/frontend/src/container/MetricsApplication/MetricsApplication.factory.ts @@ -14,7 +14,6 @@ export const 
getWidgetQueryBuilder = ({ }: GetWidgetQueryBuilderProps): Widgets => ({ description: '', id: id || v4(), - isStacked: false, nullZeroValues: '', opacity: '0', panelTypes, diff --git a/frontend/src/container/NewWidget/RightContainer/index.tsx b/frontend/src/container/NewWidget/RightContainer/index.tsx index 3e693b625876..4a374d977d07 100644 --- a/frontend/src/container/NewWidget/RightContainer/index.tsx +++ b/frontend/src/container/NewWidget/RightContainer/index.tsx @@ -535,8 +535,6 @@ interface RightContainerProps { setTitle: Dispatch>; description: string; setDescription: Dispatch>; - stacked: boolean; - setStacked: Dispatch>; opacity: string; setOpacity: Dispatch>; selectedNullZeroValue: string; diff --git a/frontend/src/container/NewWidget/__test__/NewWidget.test.tsx b/frontend/src/container/NewWidget/__test__/NewWidget.test.tsx index 68b58f3e017d..3548583a5090 100644 --- a/frontend/src/container/NewWidget/__test__/NewWidget.test.tsx +++ b/frontend/src/container/NewWidget/__test__/NewWidget.test.tsx @@ -6,7 +6,48 @@ // - Handling multiple rows correctly // - Handling widgets with different heights -import { placeWidgetAtBottom, placeWidgetBetweenRows } from '../utils'; +import { PANEL_TYPES } from 'constants/queryBuilder'; +import { DashboardProvider } from 'providers/Dashboard/Dashboard'; +import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider'; +import { I18nextProvider } from 'react-i18next'; +import { useSearchParams } from 'react-router-dom-v5-compat'; +import i18n from 'ReactI18'; +import { render } from 'tests/test-utils'; + +import NewWidget from '..'; +import { + getDefaultWidgetData, + placeWidgetAtBottom, + placeWidgetBetweenRows, +} from '../utils'; + +const MOCK_SEARCH_PARAMS = + 
'?graphType=bar&widgetId=b473eef0-8eb5-4dd3-8089-c1817734084f&compositeQuery=%7B"id"%3A"f026c678-9abf-42af-a3dc-f73dc8cbb810"%2C"builder"%3A%7B"queryData"%3A%5B%7B"dataSource"%3A"metrics"%2C"queryName"%3A"A"%2C"aggregateOperator"%3A"count"%2C"aggregateAttribute"%3A%7B"id"%3A"----"%2C"dataType"%3A""%2C"key"%3A""%2C"type"%3A""%7D%2C"timeAggregation"%3A"rate"%2C"spaceAggregation"%3A"sum"%2C"filter"%3A%7B"expression"%3A""%7D%2C"aggregations"%3A%5B%7B"metricName"%3A""%2C"temporality"%3A""%2C"timeAggregation"%3A"count"%2C"spaceAggregation"%3A"sum"%2C"reduceTo"%3A"avg"%7D%5D%2C"functions"%3A%5B%5D%2C"filters"%3A%7B"items"%3A%5B%5D%2C"op"%3A"AND"%7D%2C"expression"%3A"A"%2C"disabled"%3Afalse%2C"stepInterval"%3Anull%2C"having"%3A%5B%5D%2C"limit"%3Anull%2C"orderBy"%3A%5B%5D%2C"groupBy"%3A%5B%5D%2C"legend"%3A""%2C"reduceTo"%3A"avg"%2C"source"%3A""%7D%5D%2C"queryFormulas"%3A%5B%5D%2C"queryTraceOperator"%3A%5B%5D%7D%2C"clickhouse_sql"%3A%5B%7B"name"%3A"A"%2C"legend"%3A""%2C"disabled"%3Afalse%2C"query"%3A""%7D%5D%2C"promql"%3A%5B%7B"name"%3A"A"%2C"query"%3A""%2C"legend"%3A""%2C"disabled"%3Afalse%7D%5D%2C"queryType"%3A"builder"%7D&relativeTime=30m'; +// Mocks +jest.mock('uplot', () => ({ + paths: { spline: jest.fn(), bars: jest.fn() }, + default: jest.fn(() => ({ paths: { spline: jest.fn(), bars: jest.fn() } })), +})); + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useLocation: (): { pathname: string; search: string } => ({ + pathname: '', + search: MOCK_SEARCH_PARAMS, + }), +})); + +jest.mock('hooks/useSafeNavigate', () => ({ + useSafeNavigate: (): { safeNavigate: jest.Mock } => ({ + safeNavigate: jest.fn(), + }), +})); + +jest.mock('react-router-dom-v5-compat', () => ({ + ...jest.requireActual('react-router-dom-v5-compat'), + useSearchParams: jest.fn(), + useNavigationType: jest.fn(() => 'PUSH'), +})); describe('placeWidgetAtBottom', () => { it('should place widget at (0,0) when layout is empty', () => { @@ -216,3 +257,55 @@ 
describe('placeWidgetBetweenRows', () => { ]); }); }); + +describe('getDefaultWidgetData', () => { + it('should set stackedBarChart to true by default for new panel creation', () => { + const widgetId = 'test-widget-123'; + const panelType = PANEL_TYPES.BAR; + + const result = getDefaultWidgetData(widgetId, panelType); + + expect(result.stackedBarChart).toBe(true); + expect(result.id).toBe(widgetId); + expect(result.panelTypes).toBe(panelType); + }); +}); + +describe('Stacking bar in new panel', () => { + it('New panel should have stack bar - true by default', () => { + // Mock useSearchParams to return the expected values + (useSearchParams as jest.Mock).mockReturnValue([ + new URLSearchParams(MOCK_SEARCH_PARAMS), + jest.fn(), + ]); + + const { container, getByText, getByRole } = render( + + + + + + + , + ); + + // Verify label is present + expect(getByText('Stack series')).toBeInTheDocument(); + + // Verify section exists + const section = container.querySelector('section > .stack-chart'); + expect(section).toBeInTheDocument(); + + // Verify switch is present and enabled (ant-switch-checked) + const switchBtn = section?.querySelector('.ant-switch'); + expect(switchBtn).toBeInTheDocument(); + expect(switchBtn).toHaveClass('ant-switch-checked'); + + // (Optional) More semantic: verify by role + expect(getByRole('switch')).toBeChecked(); + }); +}); diff --git a/frontend/src/container/NewWidget/index.tsx b/frontend/src/container/NewWidget/index.tsx index f5ad39688734..290b8d359b23 100644 --- a/frontend/src/container/NewWidget/index.tsx +++ b/frontend/src/container/NewWidget/index.tsx @@ -178,10 +178,6 @@ function NewWidget({ selectedWidget?.yAxisUnit || 'none', ); - const [stacked, setStacked] = useState( - selectedWidget?.isStacked || false, - ); - const [stackedBarChart, setStackedBarChart] = useState( selectedWidget?.stackedBarChart || false, ); @@ -258,7 +254,6 @@ function NewWidget({ query: currentQuery, title, description, - isStacked: stacked, opacity, 
nullZeroValues: selectedNullZeroValue, yAxisUnit, @@ -292,7 +287,6 @@ function NewWidget({ selectedTracesFields, softMax, softMin, - stacked, thresholds, title, yAxisUnit, @@ -494,7 +488,6 @@ function NewWidget({ ...(selectedWidget || ({} as Widgets)), description: selectedWidget?.description || '', timePreferance: selectedTime.enum, - isStacked: selectedWidget?.isStacked || false, opacity: selectedWidget?.opacity || '1', nullZeroValues: selectedWidget?.nullZeroValues || 'zero', title: selectedWidget?.title, @@ -524,7 +517,6 @@ function NewWidget({ ...(selectedWidget || ({} as Widgets)), description: selectedWidget?.description || '', timePreferance: selectedTime.enum, - isStacked: selectedWidget?.isStacked || false, opacity: selectedWidget?.opacity || '1', nullZeroValues: selectedWidget?.nullZeroValues || 'zero', title: selectedWidget?.title, @@ -818,8 +810,6 @@ function NewWidget({ setTitle={setTitle} description={description} setDescription={setDescription} - stacked={stacked} - setStacked={setStacked} stackedBarChart={stackedBarChart} setStackedBarChart={setStackedBarChart} opacity={opacity} diff --git a/frontend/src/container/NewWidget/utils.ts b/frontend/src/container/NewWidget/utils.ts index 1be37cf5a433..918324b22bab 100644 --- a/frontend/src/container/NewWidget/utils.ts +++ b/frontend/src/container/NewWidget/utils.ts @@ -543,7 +543,6 @@ export const getDefaultWidgetData = ( id, title: '', description: '', - isStacked: false, nullZeroValues: '', opacity: '', panelTypes: name, @@ -554,6 +553,7 @@ export const getDefaultWidgetData = ( timePreferance: 'GLOBAL_TIME', softMax: null, softMin: null, + stackedBarChart: true, selectedLogFields: defaultLogsSelectedColumns.map((field) => ({ ...field, type: field.fieldContext ?? 
'', diff --git a/frontend/src/container/PanelWrapper/__tests__/tablePanelWrapperHelper.ts b/frontend/src/container/PanelWrapper/__tests__/tablePanelWrapperHelper.ts index f20df538a77e..ca44be8268ad 100644 --- a/frontend/src/container/PanelWrapper/__tests__/tablePanelWrapperHelper.ts +++ b/frontend/src/container/PanelWrapper/__tests__/tablePanelWrapperHelper.ts @@ -2,7 +2,6 @@ export const tablePanelWidgetQuery = { id: '727533b0-7718-4f99-a1db-a1875649325c', title: '', description: '', - isStacked: false, nullZeroValues: 'zero', opacity: '1', panelTypes: 'table', diff --git a/frontend/src/container/PanelWrapper/__tests__/valuePanelWrapperHelper.ts b/frontend/src/container/PanelWrapper/__tests__/valuePanelWrapperHelper.ts index 4a88f4b9f3cb..83ce4336e895 100644 --- a/frontend/src/container/PanelWrapper/__tests__/valuePanelWrapperHelper.ts +++ b/frontend/src/container/PanelWrapper/__tests__/valuePanelWrapperHelper.ts @@ -2,7 +2,6 @@ export const valuePanelWidget = { id: 'b8b93086-ef01-47bf-9044-1e7abd583be4', title: 'signoz latency in ms', description: '', - isStacked: false, nullZeroValues: 'zero', opacity: '1', panelTypes: 'value', diff --git a/frontend/src/container/QueryTable/__test__/mocks.ts b/frontend/src/container/QueryTable/__test__/mocks.ts index 8c2758508831..38fb2c92682f 100644 --- a/frontend/src/container/QueryTable/__test__/mocks.ts +++ b/frontend/src/container/QueryTable/__test__/mocks.ts @@ -236,7 +236,6 @@ export const WidgetHeaderProps: any = { description: '', fillSpans: false, id: 'add65f0d-7662-4024-af51-da567759235d', - isStacked: false, mergeAllActiveQueries: false, nullZeroValues: 'zero', opacity: '1', diff --git a/frontend/src/hooks/dashboard/utils.ts b/frontend/src/hooks/dashboard/utils.ts index f9f4267baf29..39bd00f2d1b7 100644 --- a/frontend/src/hooks/dashboard/utils.ts +++ b/frontend/src/hooks/dashboard/utils.ts @@ -52,7 +52,6 @@ export const addEmptyWidgetInDashboardJSONWithQuery = ( id: widgetId, query, description: '', - isStacked: 
false, nullZeroValues: '', opacity: '', title: '', diff --git a/frontend/src/lib/uPlotLib/utils/getSeriesData.ts b/frontend/src/lib/uPlotLib/utils/getSeriesData.ts index 1ae9593474fd..8dff3d445c93 100644 --- a/frontend/src/lib/uPlotLib/utils/getSeriesData.ts +++ b/frontend/src/lib/uPlotLib/utils/getSeriesData.ts @@ -86,7 +86,7 @@ const getSeries = ({ ? hiddenGraph[i] : true, label, - fill: panelType && panelType === PANEL_TYPES.BAR ? `${color}40` : undefined, + fill: panelType && panelType === PANEL_TYPES.BAR ? `${color}` : undefined, stroke: color, width: 2, spanGaps: true, diff --git a/frontend/src/lib/uPlotLib/utils/tests/getSeriesData.test.ts b/frontend/src/lib/uPlotLib/utils/tests/getSeriesData.test.ts index 199df6f540fc..b0a900bf2b48 100644 --- a/frontend/src/lib/uPlotLib/utils/tests/getSeriesData.test.ts +++ b/frontend/src/lib/uPlotLib/utils/tests/getSeriesData.test.ts @@ -14,7 +14,7 @@ describe('Get Series Data', () => { expect(seriesData.length).toBe(5); expect(seriesData[1].label).toBe('firstLegend'); expect(seriesData[1].show).toBe(true); - expect(seriesData[1].fill).toBe('#C7158540'); + expect(seriesData[1].fill).toBe('#C71585'); expect(seriesData[1].width).toBe(2); }); diff --git a/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil.ts b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil.ts index 974b0c176493..fa477028a19b 100644 --- a/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil.ts +++ b/frontend/src/pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil.ts @@ -39,7 +39,6 @@ export const getWidgetQueryBuilder = ({ }: GetWidgetQueryPropsReturn): Widgets => ({ description: description || '', id: id || uuid(), - isStacked: false, nullZeroValues: nullZeroValues || '', opacity: '1', panelTypes, diff --git a/frontend/src/types/api/dashboard/getAll.ts b/frontend/src/types/api/dashboard/getAll.ts index 82dc5cc89541..3d8304e24aea 100644 --- a/frontend/src/types/api/dashboard/getAll.ts +++ 
b/frontend/src/types/api/dashboard/getAll.ts @@ -104,7 +104,6 @@ export interface ColumnUnit { [key: string]: string; } export interface IBaseWidget { - isStacked: boolean; id: string; panelTypes: PANEL_TYPES; title: ReactNode; From c477ec65da88484f55574b25eba1838de906e42e Mon Sep 17 00:00:00 2001 From: Abhi kumar Date: Thu, 11 Sep 2025 12:55:13 +0530 Subject: [PATCH 15/51] feat: added support for hasToken function in QB (#9058) * feat: updated grammer to add hasToken function * feat: added function constant --- .../QueryV2/QuerySearch/QuerySearch.tsx | 11 +- frontend/src/constants/antlrQueryConstants.ts | 1 + frontend/src/parser/FilterQuery.interp | 4 +- frontend/src/parser/FilterQuery.tokens | 19 +- frontend/src/parser/FilterQueryLexer.interp | 5 +- frontend/src/parser/FilterQueryLexer.tokens | 19 +- frontend/src/parser/FilterQueryLexer.ts | 235 +++++++++--------- frontend/src/parser/FilterQueryListener.ts | 3 +- frontend/src/parser/FilterQueryParser.ts | 42 ++-- frontend/src/parser/FilterQueryVisitor.ts | 3 +- frontend/src/utils/tokenUtils.ts | 1 + 11 files changed, 183 insertions(+), 160 deletions(-) diff --git a/frontend/src/components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch.tsx b/frontend/src/components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch.tsx index 38197c0722e1..24a66c3e630d 100644 --- a/frontend/src/components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch.tsx +++ b/frontend/src/components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch.tsx @@ -23,6 +23,7 @@ import cx from 'classnames'; import { negationQueryOperatorSuggestions, OPERATORS, + QUERY_BUILDER_FUNCTIONS, QUERY_BUILDER_KEY_TYPES, QUERY_BUILDER_OPERATORS_BY_KEY_TYPE, queryOperatorSuggestions, @@ -1076,11 +1077,11 @@ function QuerySearch({ } if (queryContext.isInFunction) { - options = [ - { label: 'HAS', type: 'function' }, - { label: 'HASANY', type: 'function' }, - { label: 'HASALL', type: 'function' }, - ]; + options = Object.values(QUERY_BUILDER_FUNCTIONS).map((option) => ({ + 
label: option, + apply: `${option}()`, + type: 'function', + })); // Add space after selection for functions const optionsWithSpace = addSpaceToOptions(options); diff --git a/frontend/src/constants/antlrQueryConstants.ts b/frontend/src/constants/antlrQueryConstants.ts index 3c5b87abba8b..be3afddf09d0 100644 --- a/frontend/src/constants/antlrQueryConstants.ts +++ b/frontend/src/constants/antlrQueryConstants.ts @@ -42,6 +42,7 @@ export const QUERY_BUILDER_FUNCTIONS = { HAS: 'has', HASANY: 'hasAny', HASALL: 'hasAll', + HASTOKEN: 'hasToken', }; export function negateOperator(operatorOrFunction: string): string { diff --git a/frontend/src/parser/FilterQuery.interp b/frontend/src/parser/FilterQuery.interp index 83105ce119b2..3c22ad32449c 100644 --- a/frontend/src/parser/FilterQuery.interp +++ b/frontend/src/parser/FilterQuery.interp @@ -31,6 +31,7 @@ null null null null +null token symbolic names: null @@ -56,6 +57,7 @@ IN NOT AND OR +HASTOKEN HAS HASANY HASALL @@ -87,4 +89,4 @@ key atn: -[4, 1, 31, 219, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 5, 2, 43, 8, 2, 10, 2, 12, 2, 46, 9, 2, 1, 3, 1, 3, 1, 3, 1, 3, 5, 3, 52, 8, 3, 10, 3, 12, 3, 55, 9, 3, 1, 4, 3, 4, 58, 8, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 71, 8, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 150, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 
7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 164, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 181, 8, 8, 1, 9, 1, 9, 1, 9, 5, 9, 186, 8, 9, 10, 9, 12, 9, 189, 9, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 201, 8, 12, 10, 12, 12, 12, 204, 9, 12, 1, 13, 1, 13, 1, 13, 3, 13, 209, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 0, 0, 17, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 0, 5, 1, 0, 7, 8, 1, 0, 13, 14, 2, 0, 28, 28, 31, 31, 1, 0, 23, 25, 1, 0, 26, 29, 235, 0, 34, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 47, 1, 0, 0, 0, 8, 57, 1, 0, 0, 0, 10, 70, 1, 0, 0, 0, 12, 149, 1, 0, 0, 0, 14, 163, 1, 0, 0, 0, 16, 180, 1, 0, 0, 0, 18, 182, 1, 0, 0, 0, 20, 190, 1, 0, 0, 0, 22, 192, 1, 0, 0, 0, 24, 197, 1, 0, 0, 0, 26, 208, 1, 0, 0, 0, 28, 210, 1, 0, 0, 0, 30, 214, 1, 0, 0, 0, 32, 216, 1, 0, 0, 0, 34, 35, 3, 2, 1, 0, 35, 36, 5, 0, 0, 1, 36, 1, 1, 0, 0, 0, 37, 38, 3, 4, 2, 0, 38, 3, 1, 0, 0, 0, 39, 44, 3, 6, 3, 0, 40, 41, 5, 22, 0, 0, 41, 43, 3, 6, 3, 0, 42, 40, 1, 0, 0, 0, 43, 46, 1, 0, 0, 0, 44, 42, 1, 0, 0, 0, 44, 45, 1, 0, 0, 0, 45, 5, 1, 0, 0, 0, 46, 44, 1, 0, 0, 0, 47, 53, 3, 8, 4, 0, 48, 49, 5, 21, 0, 0, 49, 52, 3, 8, 4, 0, 50, 52, 3, 8, 4, 0, 51, 48, 1, 0, 0, 0, 51, 50, 1, 0, 0, 0, 52, 55, 1, 0, 0, 0, 53, 51, 1, 0, 0, 0, 53, 54, 1, 0, 0, 0, 54, 7, 1, 0, 0, 0, 55, 53, 1, 0, 0, 0, 56, 58, 5, 20, 0, 0, 57, 56, 1, 0, 0, 0, 57, 58, 1, 0, 0, 0, 58, 59, 1, 0, 0, 0, 59, 60, 3, 10, 5, 0, 60, 9, 1, 0, 0, 0, 61, 62, 5, 1, 0, 0, 62, 63, 3, 4, 2, 0, 63, 64, 5, 2, 0, 0, 64, 71, 1, 0, 0, 0, 65, 71, 3, 12, 6, 0, 66, 71, 3, 22, 11, 0, 67, 71, 3, 20, 10, 0, 68, 71, 3, 32, 16, 0, 69, 71, 3, 30, 15, 0, 70, 61, 1, 0, 0, 0, 70, 65, 1, 0, 0, 0, 70, 66, 1, 0, 0, 0, 70, 67, 1, 0, 0, 0, 70, 68, 1, 0, 0, 0, 70, 69, 1, 0, 0, 0, 71, 11, 1, 0, 0, 0, 72, 73, 3, 32, 16, 0, 73, 74, 5, 6, 0, 0, 74, 75, 3, 30, 15, 0, 75, 150, 1, 0, 0, 0, 76, 77, 3, 32, 
16, 0, 77, 78, 7, 0, 0, 0, 78, 79, 3, 30, 15, 0, 79, 150, 1, 0, 0, 0, 80, 81, 3, 32, 16, 0, 81, 82, 5, 9, 0, 0, 82, 83, 3, 30, 15, 0, 83, 150, 1, 0, 0, 0, 84, 85, 3, 32, 16, 0, 85, 86, 5, 10, 0, 0, 86, 87, 3, 30, 15, 0, 87, 150, 1, 0, 0, 0, 88, 89, 3, 32, 16, 0, 89, 90, 5, 11, 0, 0, 90, 91, 3, 30, 15, 0, 91, 150, 1, 0, 0, 0, 92, 93, 3, 32, 16, 0, 93, 94, 5, 12, 0, 0, 94, 95, 3, 30, 15, 0, 95, 150, 1, 0, 0, 0, 96, 97, 3, 32, 16, 0, 97, 98, 7, 1, 0, 0, 98, 99, 3, 30, 15, 0, 99, 150, 1, 0, 0, 0, 100, 101, 3, 32, 16, 0, 101, 102, 5, 20, 0, 0, 102, 103, 7, 1, 0, 0, 103, 104, 3, 30, 15, 0, 104, 150, 1, 0, 0, 0, 105, 106, 3, 32, 16, 0, 106, 107, 5, 15, 0, 0, 107, 108, 3, 30, 15, 0, 108, 109, 5, 21, 0, 0, 109, 110, 3, 30, 15, 0, 110, 150, 1, 0, 0, 0, 111, 112, 3, 32, 16, 0, 112, 113, 5, 20, 0, 0, 113, 114, 5, 15, 0, 0, 114, 115, 3, 30, 15, 0, 115, 116, 5, 21, 0, 0, 116, 117, 3, 30, 15, 0, 117, 150, 1, 0, 0, 0, 118, 119, 3, 32, 16, 0, 119, 120, 3, 14, 7, 0, 120, 150, 1, 0, 0, 0, 121, 122, 3, 32, 16, 0, 122, 123, 3, 16, 8, 0, 123, 150, 1, 0, 0, 0, 124, 125, 3, 32, 16, 0, 125, 126, 5, 16, 0, 0, 126, 150, 1, 0, 0, 0, 127, 128, 3, 32, 16, 0, 128, 129, 5, 20, 0, 0, 129, 130, 5, 16, 0, 0, 130, 150, 1, 0, 0, 0, 131, 132, 3, 32, 16, 0, 132, 133, 5, 17, 0, 0, 133, 134, 3, 30, 15, 0, 134, 150, 1, 0, 0, 0, 135, 136, 3, 32, 16, 0, 136, 137, 5, 20, 0, 0, 137, 138, 5, 17, 0, 0, 138, 139, 3, 30, 15, 0, 139, 150, 1, 0, 0, 0, 140, 141, 3, 32, 16, 0, 141, 142, 5, 18, 0, 0, 142, 143, 3, 30, 15, 0, 143, 150, 1, 0, 0, 0, 144, 145, 3, 32, 16, 0, 145, 146, 5, 20, 0, 0, 146, 147, 5, 18, 0, 0, 147, 148, 3, 30, 15, 0, 148, 150, 1, 0, 0, 0, 149, 72, 1, 0, 0, 0, 149, 76, 1, 0, 0, 0, 149, 80, 1, 0, 0, 0, 149, 84, 1, 0, 0, 0, 149, 88, 1, 0, 0, 0, 149, 92, 1, 0, 0, 0, 149, 96, 1, 0, 0, 0, 149, 100, 1, 0, 0, 0, 149, 105, 1, 0, 0, 0, 149, 111, 1, 0, 0, 0, 149, 118, 1, 0, 0, 0, 149, 121, 1, 0, 0, 0, 149, 124, 1, 0, 0, 0, 149, 127, 1, 0, 0, 0, 149, 131, 1, 0, 0, 0, 149, 135, 1, 0, 0, 0, 149, 140, 1, 0, 0, 0, 
149, 144, 1, 0, 0, 0, 150, 13, 1, 0, 0, 0, 151, 152, 5, 19, 0, 0, 152, 153, 5, 1, 0, 0, 153, 154, 3, 18, 9, 0, 154, 155, 5, 2, 0, 0, 155, 164, 1, 0, 0, 0, 156, 157, 5, 19, 0, 0, 157, 158, 5, 3, 0, 0, 158, 159, 3, 18, 9, 0, 159, 160, 5, 4, 0, 0, 160, 164, 1, 0, 0, 0, 161, 162, 5, 19, 0, 0, 162, 164, 3, 30, 15, 0, 163, 151, 1, 0, 0, 0, 163, 156, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 164, 15, 1, 0, 0, 0, 165, 166, 5, 20, 0, 0, 166, 167, 5, 19, 0, 0, 167, 168, 5, 1, 0, 0, 168, 169, 3, 18, 9, 0, 169, 170, 5, 2, 0, 0, 170, 181, 1, 0, 0, 0, 171, 172, 5, 20, 0, 0, 172, 173, 5, 19, 0, 0, 173, 174, 5, 3, 0, 0, 174, 175, 3, 18, 9, 0, 175, 176, 5, 4, 0, 0, 176, 181, 1, 0, 0, 0, 177, 178, 5, 20, 0, 0, 178, 179, 5, 19, 0, 0, 179, 181, 3, 30, 15, 0, 180, 165, 1, 0, 0, 0, 180, 171, 1, 0, 0, 0, 180, 177, 1, 0, 0, 0, 181, 17, 1, 0, 0, 0, 182, 187, 3, 30, 15, 0, 183, 184, 5, 5, 0, 0, 184, 186, 3, 30, 15, 0, 185, 183, 1, 0, 0, 0, 186, 189, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 19, 1, 0, 0, 0, 189, 187, 1, 0, 0, 0, 190, 191, 7, 2, 0, 0, 191, 21, 1, 0, 0, 0, 192, 193, 7, 3, 0, 0, 193, 194, 5, 1, 0, 0, 194, 195, 3, 24, 12, 0, 195, 196, 5, 2, 0, 0, 196, 23, 1, 0, 0, 0, 197, 202, 3, 26, 13, 0, 198, 199, 5, 5, 0, 0, 199, 201, 3, 26, 13, 0, 200, 198, 1, 0, 0, 0, 201, 204, 1, 0, 0, 0, 202, 200, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 25, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 209, 3, 32, 16, 0, 206, 209, 3, 30, 15, 0, 207, 209, 3, 28, 14, 0, 208, 205, 1, 0, 0, 0, 208, 206, 1, 0, 0, 0, 208, 207, 1, 0, 0, 0, 209, 27, 1, 0, 0, 0, 210, 211, 5, 3, 0, 0, 211, 212, 3, 18, 9, 0, 212, 213, 5, 4, 0, 0, 213, 29, 1, 0, 0, 0, 214, 215, 7, 4, 0, 0, 215, 31, 1, 0, 0, 0, 216, 217, 5, 29, 0, 0, 217, 33, 1, 0, 0, 0, 11, 44, 51, 53, 57, 70, 149, 163, 180, 187, 202, 208] \ No newline at end of file +[4, 1, 32, 219, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 
7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 5, 2, 43, 8, 2, 10, 2, 12, 2, 46, 9, 2, 1, 3, 1, 3, 1, 3, 1, 3, 5, 3, 52, 8, 3, 10, 3, 12, 3, 55, 9, 3, 1, 4, 3, 4, 58, 8, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 71, 8, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 150, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 164, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 181, 8, 8, 1, 9, 1, 9, 1, 9, 5, 9, 186, 8, 9, 10, 9, 12, 9, 189, 9, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 201, 8, 12, 10, 12, 12, 12, 204, 9, 12, 1, 13, 1, 13, 1, 13, 3, 13, 209, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 0, 0, 17, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 0, 5, 1, 0, 7, 8, 1, 0, 13, 14, 2, 0, 29, 29, 32, 32, 1, 0, 23, 26, 1, 0, 27, 30, 235, 0, 34, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 47, 1, 0, 0, 0, 8, 57, 1, 0, 0, 0, 10, 70, 1, 0, 0, 0, 12, 149, 1, 0, 0, 0, 14, 163, 1, 0, 0, 0, 16, 180, 1, 0, 0, 0, 18, 182, 1, 0, 0, 0, 20, 190, 1, 0, 0, 0, 22, 192, 1, 0, 0, 0, 24, 197, 1, 0, 0, 0, 26, 208, 1, 0, 0, 0, 28, 210, 1, 0, 0, 0, 30, 214, 1, 0, 0, 0, 32, 216, 1, 0, 0, 0, 34, 35, 3, 2, 1, 0, 35, 36, 5, 0, 0, 1, 36, 1, 1, 0, 0, 0, 37, 38, 3, 4, 2, 0, 38, 3, 1, 0, 0, 0, 39, 44, 3, 6, 3, 0, 40, 41, 5, 22, 0, 0, 41, 43, 3, 6, 3, 0, 42, 40, 1, 0, 0, 0, 43, 46, 1, 0, 0, 0, 44, 42, 1, 0, 0, 0, 44, 45, 1, 0, 0, 0, 45, 5, 
1, 0, 0, 0, 46, 44, 1, 0, 0, 0, 47, 53, 3, 8, 4, 0, 48, 49, 5, 21, 0, 0, 49, 52, 3, 8, 4, 0, 50, 52, 3, 8, 4, 0, 51, 48, 1, 0, 0, 0, 51, 50, 1, 0, 0, 0, 52, 55, 1, 0, 0, 0, 53, 51, 1, 0, 0, 0, 53, 54, 1, 0, 0, 0, 54, 7, 1, 0, 0, 0, 55, 53, 1, 0, 0, 0, 56, 58, 5, 20, 0, 0, 57, 56, 1, 0, 0, 0, 57, 58, 1, 0, 0, 0, 58, 59, 1, 0, 0, 0, 59, 60, 3, 10, 5, 0, 60, 9, 1, 0, 0, 0, 61, 62, 5, 1, 0, 0, 62, 63, 3, 4, 2, 0, 63, 64, 5, 2, 0, 0, 64, 71, 1, 0, 0, 0, 65, 71, 3, 12, 6, 0, 66, 71, 3, 22, 11, 0, 67, 71, 3, 20, 10, 0, 68, 71, 3, 32, 16, 0, 69, 71, 3, 30, 15, 0, 70, 61, 1, 0, 0, 0, 70, 65, 1, 0, 0, 0, 70, 66, 1, 0, 0, 0, 70, 67, 1, 0, 0, 0, 70, 68, 1, 0, 0, 0, 70, 69, 1, 0, 0, 0, 71, 11, 1, 0, 0, 0, 72, 73, 3, 32, 16, 0, 73, 74, 5, 6, 0, 0, 74, 75, 3, 30, 15, 0, 75, 150, 1, 0, 0, 0, 76, 77, 3, 32, 16, 0, 77, 78, 7, 0, 0, 0, 78, 79, 3, 30, 15, 0, 79, 150, 1, 0, 0, 0, 80, 81, 3, 32, 16, 0, 81, 82, 5, 9, 0, 0, 82, 83, 3, 30, 15, 0, 83, 150, 1, 0, 0, 0, 84, 85, 3, 32, 16, 0, 85, 86, 5, 10, 0, 0, 86, 87, 3, 30, 15, 0, 87, 150, 1, 0, 0, 0, 88, 89, 3, 32, 16, 0, 89, 90, 5, 11, 0, 0, 90, 91, 3, 30, 15, 0, 91, 150, 1, 0, 0, 0, 92, 93, 3, 32, 16, 0, 93, 94, 5, 12, 0, 0, 94, 95, 3, 30, 15, 0, 95, 150, 1, 0, 0, 0, 96, 97, 3, 32, 16, 0, 97, 98, 7, 1, 0, 0, 98, 99, 3, 30, 15, 0, 99, 150, 1, 0, 0, 0, 100, 101, 3, 32, 16, 0, 101, 102, 5, 20, 0, 0, 102, 103, 7, 1, 0, 0, 103, 104, 3, 30, 15, 0, 104, 150, 1, 0, 0, 0, 105, 106, 3, 32, 16, 0, 106, 107, 5, 15, 0, 0, 107, 108, 3, 30, 15, 0, 108, 109, 5, 21, 0, 0, 109, 110, 3, 30, 15, 0, 110, 150, 1, 0, 0, 0, 111, 112, 3, 32, 16, 0, 112, 113, 5, 20, 0, 0, 113, 114, 5, 15, 0, 0, 114, 115, 3, 30, 15, 0, 115, 116, 5, 21, 0, 0, 116, 117, 3, 30, 15, 0, 117, 150, 1, 0, 0, 0, 118, 119, 3, 32, 16, 0, 119, 120, 3, 14, 7, 0, 120, 150, 1, 0, 0, 0, 121, 122, 3, 32, 16, 0, 122, 123, 3, 16, 8, 0, 123, 150, 1, 0, 0, 0, 124, 125, 3, 32, 16, 0, 125, 126, 5, 16, 0, 0, 126, 150, 1, 0, 0, 0, 127, 128, 3, 32, 16, 0, 128, 129, 5, 20, 0, 0, 129, 130, 5, 16, 0, 0, 130, 
150, 1, 0, 0, 0, 131, 132, 3, 32, 16, 0, 132, 133, 5, 17, 0, 0, 133, 134, 3, 30, 15, 0, 134, 150, 1, 0, 0, 0, 135, 136, 3, 32, 16, 0, 136, 137, 5, 20, 0, 0, 137, 138, 5, 17, 0, 0, 138, 139, 3, 30, 15, 0, 139, 150, 1, 0, 0, 0, 140, 141, 3, 32, 16, 0, 141, 142, 5, 18, 0, 0, 142, 143, 3, 30, 15, 0, 143, 150, 1, 0, 0, 0, 144, 145, 3, 32, 16, 0, 145, 146, 5, 20, 0, 0, 146, 147, 5, 18, 0, 0, 147, 148, 3, 30, 15, 0, 148, 150, 1, 0, 0, 0, 149, 72, 1, 0, 0, 0, 149, 76, 1, 0, 0, 0, 149, 80, 1, 0, 0, 0, 149, 84, 1, 0, 0, 0, 149, 88, 1, 0, 0, 0, 149, 92, 1, 0, 0, 0, 149, 96, 1, 0, 0, 0, 149, 100, 1, 0, 0, 0, 149, 105, 1, 0, 0, 0, 149, 111, 1, 0, 0, 0, 149, 118, 1, 0, 0, 0, 149, 121, 1, 0, 0, 0, 149, 124, 1, 0, 0, 0, 149, 127, 1, 0, 0, 0, 149, 131, 1, 0, 0, 0, 149, 135, 1, 0, 0, 0, 149, 140, 1, 0, 0, 0, 149, 144, 1, 0, 0, 0, 150, 13, 1, 0, 0, 0, 151, 152, 5, 19, 0, 0, 152, 153, 5, 1, 0, 0, 153, 154, 3, 18, 9, 0, 154, 155, 5, 2, 0, 0, 155, 164, 1, 0, 0, 0, 156, 157, 5, 19, 0, 0, 157, 158, 5, 3, 0, 0, 158, 159, 3, 18, 9, 0, 159, 160, 5, 4, 0, 0, 160, 164, 1, 0, 0, 0, 161, 162, 5, 19, 0, 0, 162, 164, 3, 30, 15, 0, 163, 151, 1, 0, 0, 0, 163, 156, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 164, 15, 1, 0, 0, 0, 165, 166, 5, 20, 0, 0, 166, 167, 5, 19, 0, 0, 167, 168, 5, 1, 0, 0, 168, 169, 3, 18, 9, 0, 169, 170, 5, 2, 0, 0, 170, 181, 1, 0, 0, 0, 171, 172, 5, 20, 0, 0, 172, 173, 5, 19, 0, 0, 173, 174, 5, 3, 0, 0, 174, 175, 3, 18, 9, 0, 175, 176, 5, 4, 0, 0, 176, 181, 1, 0, 0, 0, 177, 178, 5, 20, 0, 0, 178, 179, 5, 19, 0, 0, 179, 181, 3, 30, 15, 0, 180, 165, 1, 0, 0, 0, 180, 171, 1, 0, 0, 0, 180, 177, 1, 0, 0, 0, 181, 17, 1, 0, 0, 0, 182, 187, 3, 30, 15, 0, 183, 184, 5, 5, 0, 0, 184, 186, 3, 30, 15, 0, 185, 183, 1, 0, 0, 0, 186, 189, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 19, 1, 0, 0, 0, 189, 187, 1, 0, 0, 0, 190, 191, 7, 2, 0, 0, 191, 21, 1, 0, 0, 0, 192, 193, 7, 3, 0, 0, 193, 194, 5, 1, 0, 0, 194, 195, 3, 24, 12, 0, 195, 196, 5, 2, 0, 0, 196, 23, 1, 0, 0, 0, 197, 202, 3, 
26, 13, 0, 198, 199, 5, 5, 0, 0, 199, 201, 3, 26, 13, 0, 200, 198, 1, 0, 0, 0, 201, 204, 1, 0, 0, 0, 202, 200, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 25, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 209, 3, 32, 16, 0, 206, 209, 3, 30, 15, 0, 207, 209, 3, 28, 14, 0, 208, 205, 1, 0, 0, 0, 208, 206, 1, 0, 0, 0, 208, 207, 1, 0, 0, 0, 209, 27, 1, 0, 0, 0, 210, 211, 5, 3, 0, 0, 211, 212, 3, 18, 9, 0, 212, 213, 5, 4, 0, 0, 213, 29, 1, 0, 0, 0, 214, 215, 7, 4, 0, 0, 215, 31, 1, 0, 0, 0, 216, 217, 5, 30, 0, 0, 217, 33, 1, 0, 0, 0, 11, 44, 51, 53, 57, 70, 149, 163, 180, 187, 202, 208] \ No newline at end of file diff --git a/frontend/src/parser/FilterQuery.tokens b/frontend/src/parser/FilterQuery.tokens index 4df881075f0a..615858f49676 100644 --- a/frontend/src/parser/FilterQuery.tokens +++ b/frontend/src/parser/FilterQuery.tokens @@ -20,15 +20,16 @@ IN=19 NOT=20 AND=21 OR=22 -HAS=23 -HASANY=24 -HASALL=25 -BOOL=26 -NUMBER=27 -QUOTED_TEXT=28 -KEY=29 -WS=30 -FREETEXT=31 +HASTOKEN=23 +HAS=24 +HASANY=25 +HASALL=26 +BOOL=27 +NUMBER=28 +QUOTED_TEXT=29 +KEY=30 +WS=31 +FREETEXT=32 '('=1 ')'=2 '['=3 diff --git a/frontend/src/parser/FilterQueryLexer.interp b/frontend/src/parser/FilterQueryLexer.interp index 3b149e9131c8..6f3d2d7e1697 100644 --- a/frontend/src/parser/FilterQueryLexer.interp +++ b/frontend/src/parser/FilterQueryLexer.interp @@ -31,6 +31,7 @@ null null null null +null token symbolic names: null @@ -56,6 +57,7 @@ IN NOT AND OR +HASTOKEN HAS HASANY HASALL @@ -89,6 +91,7 @@ IN NOT AND OR +HASTOKEN HAS HASANY HASALL @@ -112,4 +115,4 @@ mode names: DEFAULT_MODE atn: -[4, 0, 31, 303, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 
2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 87, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 130, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 147, 8, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 3, 25, 190, 8, 25, 1, 26, 1, 26, 1, 27, 3, 27, 195, 8, 27, 1, 27, 4, 27, 198, 8, 27, 11, 27, 12, 27, 199, 1, 27, 1, 27, 5, 27, 204, 8, 27, 10, 27, 12, 27, 207, 9, 27, 3, 27, 209, 8, 27, 1, 27, 1, 27, 3, 27, 213, 8, 27, 1, 27, 4, 27, 216, 8, 27, 11, 27, 12, 27, 217, 3, 27, 220, 8, 27, 1, 27, 3, 27, 223, 8, 27, 1, 27, 1, 27, 4, 27, 227, 8, 27, 11, 27, 12, 27, 228, 1, 27, 1, 27, 3, 27, 233, 8, 27, 1, 27, 4, 27, 236, 8, 27, 11, 27, 12, 27, 237, 3, 27, 240, 8, 27, 3, 27, 242, 8, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 248, 8, 28, 10, 28, 12, 28, 251, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 258, 8, 28, 10, 28, 12, 28, 261, 9, 28, 1, 28, 3, 28, 264, 8, 28, 1, 29, 1, 29, 5, 29, 268, 8, 29, 10, 29, 12, 29, 271, 9, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 285, 8, 32, 10, 32, 12, 32, 288, 9, 32, 1, 33, 4, 33, 291, 8, 33, 11, 33, 12, 33, 292, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 4, 35, 300, 8, 35, 11, 35, 12, 35, 301, 0, 0, 36, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 
18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 0, 55, 27, 57, 28, 59, 0, 61, 0, 63, 0, 65, 29, 67, 30, 69, 0, 71, 31, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116, 2, 0, 87, 87, 119, 119, 2, 0, 78, 78, 110, 110, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 79, 79, 111, 111, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 4, 0, 36, 36, 65, 90, 95, 95, 97, 122, 6, 0, 36, 36, 45, 45, 47, 58, 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 325, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 1, 73, 1, 0, 0, 0, 3, 75, 1, 0, 0, 0, 5, 77, 1, 0, 0, 0, 7, 79, 1, 0, 0, 0, 9, 81, 1, 0, 0, 0, 11, 86, 1, 0, 0, 0, 13, 88, 1, 0, 0, 0, 15, 91, 1, 0, 0, 0, 17, 94, 1, 0, 0, 0, 19, 96, 1, 0, 0, 0, 21, 99, 1, 0, 0, 0, 23, 101, 1, 0, 0, 0, 25, 104, 1, 0, 0, 0, 27, 109, 1, 0, 0, 0, 29, 115, 1, 0, 0, 0, 31, 123, 1, 0, 0, 0, 33, 131, 1, 0, 0, 0, 35, 138, 1, 0, 0, 0, 37, 148, 1, 0, 0, 0, 39, 151, 1, 0, 0, 0, 41, 155, 1, 0, 0, 0, 43, 159, 1, 0, 0, 0, 45, 162, 1, 0, 0, 0, 47, 166, 1, 0, 0, 0, 
49, 173, 1, 0, 0, 0, 51, 189, 1, 0, 0, 0, 53, 191, 1, 0, 0, 0, 55, 241, 1, 0, 0, 0, 57, 263, 1, 0, 0, 0, 59, 265, 1, 0, 0, 0, 61, 272, 1, 0, 0, 0, 63, 275, 1, 0, 0, 0, 65, 279, 1, 0, 0, 0, 67, 290, 1, 0, 0, 0, 69, 296, 1, 0, 0, 0, 71, 299, 1, 0, 0, 0, 73, 74, 5, 40, 0, 0, 74, 2, 1, 0, 0, 0, 75, 76, 5, 41, 0, 0, 76, 4, 1, 0, 0, 0, 77, 78, 5, 91, 0, 0, 78, 6, 1, 0, 0, 0, 79, 80, 5, 93, 0, 0, 80, 8, 1, 0, 0, 0, 81, 82, 5, 44, 0, 0, 82, 10, 1, 0, 0, 0, 83, 87, 5, 61, 0, 0, 84, 85, 5, 61, 0, 0, 85, 87, 5, 61, 0, 0, 86, 83, 1, 0, 0, 0, 86, 84, 1, 0, 0, 0, 87, 12, 1, 0, 0, 0, 88, 89, 5, 33, 0, 0, 89, 90, 5, 61, 0, 0, 90, 14, 1, 0, 0, 0, 91, 92, 5, 60, 0, 0, 92, 93, 5, 62, 0, 0, 93, 16, 1, 0, 0, 0, 94, 95, 5, 60, 0, 0, 95, 18, 1, 0, 0, 0, 96, 97, 5, 60, 0, 0, 97, 98, 5, 61, 0, 0, 98, 20, 1, 0, 0, 0, 99, 100, 5, 62, 0, 0, 100, 22, 1, 0, 0, 0, 101, 102, 5, 62, 0, 0, 102, 103, 5, 61, 0, 0, 103, 24, 1, 0, 0, 0, 104, 105, 7, 0, 0, 0, 105, 106, 7, 1, 0, 0, 106, 107, 7, 2, 0, 0, 107, 108, 7, 3, 0, 0, 108, 26, 1, 0, 0, 0, 109, 110, 7, 1, 0, 0, 110, 111, 7, 0, 0, 0, 111, 112, 7, 1, 0, 0, 112, 113, 7, 2, 0, 0, 113, 114, 7, 3, 0, 0, 114, 28, 1, 0, 0, 0, 115, 116, 7, 4, 0, 0, 116, 117, 7, 3, 0, 0, 117, 118, 7, 5, 0, 0, 118, 119, 7, 6, 0, 0, 119, 120, 7, 3, 0, 0, 120, 121, 7, 3, 0, 0, 121, 122, 7, 7, 0, 0, 122, 30, 1, 0, 0, 0, 123, 124, 7, 3, 0, 0, 124, 125, 7, 8, 0, 0, 125, 126, 7, 1, 0, 0, 126, 127, 7, 9, 0, 0, 127, 129, 7, 5, 0, 0, 128, 130, 7, 9, 0, 0, 129, 128, 1, 0, 0, 0, 129, 130, 1, 0, 0, 0, 130, 32, 1, 0, 0, 0, 131, 132, 7, 10, 0, 0, 132, 133, 7, 3, 0, 0, 133, 134, 7, 11, 0, 0, 134, 135, 7, 3, 0, 0, 135, 136, 7, 8, 0, 0, 136, 137, 7, 12, 0, 0, 137, 34, 1, 0, 0, 0, 138, 139, 7, 13, 0, 0, 139, 140, 7, 14, 0, 0, 140, 141, 7, 7, 0, 0, 141, 142, 7, 5, 0, 0, 142, 143, 7, 15, 0, 0, 143, 144, 7, 1, 0, 0, 144, 146, 7, 7, 0, 0, 145, 147, 7, 9, 0, 0, 146, 145, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 36, 1, 0, 0, 0, 148, 149, 7, 1, 0, 0, 149, 150, 7, 7, 0, 0, 150, 38, 1, 0, 0, 0, 151, 152, 
7, 7, 0, 0, 152, 153, 7, 14, 0, 0, 153, 154, 7, 5, 0, 0, 154, 40, 1, 0, 0, 0, 155, 156, 7, 15, 0, 0, 156, 157, 7, 7, 0, 0, 157, 158, 7, 16, 0, 0, 158, 42, 1, 0, 0, 0, 159, 160, 7, 14, 0, 0, 160, 161, 7, 10, 0, 0, 161, 44, 1, 0, 0, 0, 162, 163, 7, 17, 0, 0, 163, 164, 7, 15, 0, 0, 164, 165, 7, 9, 0, 0, 165, 46, 1, 0, 0, 0, 166, 167, 7, 17, 0, 0, 167, 168, 7, 15, 0, 0, 168, 169, 7, 9, 0, 0, 169, 170, 7, 15, 0, 0, 170, 171, 7, 7, 0, 0, 171, 172, 7, 18, 0, 0, 172, 48, 1, 0, 0, 0, 173, 174, 7, 17, 0, 0, 174, 175, 7, 15, 0, 0, 175, 176, 7, 9, 0, 0, 176, 177, 7, 15, 0, 0, 177, 178, 7, 0, 0, 0, 178, 179, 7, 0, 0, 0, 179, 50, 1, 0, 0, 0, 180, 181, 7, 5, 0, 0, 181, 182, 7, 10, 0, 0, 182, 183, 7, 19, 0, 0, 183, 190, 7, 3, 0, 0, 184, 185, 7, 20, 0, 0, 185, 186, 7, 15, 0, 0, 186, 187, 7, 0, 0, 0, 187, 188, 7, 9, 0, 0, 188, 190, 7, 3, 0, 0, 189, 180, 1, 0, 0, 0, 189, 184, 1, 0, 0, 0, 190, 52, 1, 0, 0, 0, 191, 192, 7, 21, 0, 0, 192, 54, 1, 0, 0, 0, 193, 195, 3, 53, 26, 0, 194, 193, 1, 0, 0, 0, 194, 195, 1, 0, 0, 0, 195, 197, 1, 0, 0, 0, 196, 198, 3, 69, 34, 0, 197, 196, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 197, 1, 0, 0, 0, 199, 200, 1, 0, 0, 0, 200, 208, 1, 0, 0, 0, 201, 205, 5, 46, 0, 0, 202, 204, 3, 69, 34, 0, 203, 202, 1, 0, 0, 0, 204, 207, 1, 0, 0, 0, 205, 203, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 209, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 208, 201, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 219, 1, 0, 0, 0, 210, 212, 7, 3, 0, 0, 211, 213, 3, 53, 26, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 215, 1, 0, 0, 0, 214, 216, 3, 69, 34, 0, 215, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 215, 1, 0, 0, 0, 217, 218, 1, 0, 0, 0, 218, 220, 1, 0, 0, 0, 219, 210, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 242, 1, 0, 0, 0, 221, 223, 3, 53, 26, 0, 222, 221, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 5, 46, 0, 0, 225, 227, 3, 69, 34, 0, 226, 225, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 239, 1, 0, 0, 0, 230, 232, 7, 3, 0, 0, 
231, 233, 3, 53, 26, 0, 232, 231, 1, 0, 0, 0, 232, 233, 1, 0, 0, 0, 233, 235, 1, 0, 0, 0, 234, 236, 3, 69, 34, 0, 235, 234, 1, 0, 0, 0, 236, 237, 1, 0, 0, 0, 237, 235, 1, 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 240, 1, 0, 0, 0, 239, 230, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 242, 1, 0, 0, 0, 241, 194, 1, 0, 0, 0, 241, 222, 1, 0, 0, 0, 242, 56, 1, 0, 0, 0, 243, 249, 5, 34, 0, 0, 244, 248, 8, 22, 0, 0, 245, 246, 5, 92, 0, 0, 246, 248, 9, 0, 0, 0, 247, 244, 1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 248, 251, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 249, 250, 1, 0, 0, 0, 250, 252, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 264, 5, 34, 0, 0, 253, 259, 5, 39, 0, 0, 254, 258, 8, 23, 0, 0, 255, 256, 5, 92, 0, 0, 256, 258, 9, 0, 0, 0, 257, 254, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 258, 261, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 259, 260, 1, 0, 0, 0, 260, 262, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 262, 264, 5, 39, 0, 0, 263, 243, 1, 0, 0, 0, 263, 253, 1, 0, 0, 0, 264, 58, 1, 0, 0, 0, 265, 269, 7, 24, 0, 0, 266, 268, 7, 25, 0, 0, 267, 266, 1, 0, 0, 0, 268, 271, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 60, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 272, 273, 5, 91, 0, 0, 273, 274, 5, 93, 0, 0, 274, 62, 1, 0, 0, 0, 275, 276, 5, 91, 0, 0, 276, 277, 5, 42, 0, 0, 277, 278, 5, 93, 0, 0, 278, 64, 1, 0, 0, 0, 279, 286, 3, 59, 29, 0, 280, 281, 5, 46, 0, 0, 281, 285, 3, 59, 29, 0, 282, 285, 3, 61, 30, 0, 283, 285, 3, 63, 31, 0, 284, 280, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 284, 283, 1, 0, 0, 0, 285, 288, 1, 0, 0, 0, 286, 284, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0, 287, 66, 1, 0, 0, 0, 288, 286, 1, 0, 0, 0, 289, 291, 7, 26, 0, 0, 290, 289, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 6, 33, 0, 0, 295, 68, 1, 0, 0, 0, 296, 297, 7, 27, 0, 0, 297, 70, 1, 0, 0, 0, 298, 300, 8, 28, 0, 0, 299, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 72, 1, 0, 0, 0, 28, 0, 86, 129, 146, 189, 194, 199, 205, 208, 212, 217, 219, 
222, 228, 232, 237, 239, 241, 247, 249, 257, 259, 263, 269, 284, 286, 292, 301, 1, 6, 0, 0] \ No newline at end of file +[4, 0, 32, 314, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 89, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 132, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 149, 8, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 201, 8, 26, 1, 27, 1, 27, 1, 28, 3, 28, 206, 8, 28, 1, 28, 4, 28, 209, 8, 28, 11, 28, 12, 28, 210, 1, 28, 1, 28, 5, 28, 215, 8, 28, 10, 28, 12, 28, 218, 9, 28, 3, 28, 220, 8, 28, 1, 28, 1, 28, 3, 28, 224, 8, 28, 1, 28, 4, 28, 227, 8, 28, 11, 28, 12, 28, 228, 3, 28, 231, 8, 28, 1, 28, 3, 28, 234, 8, 28, 1, 28, 1, 28, 4, 28, 238, 8, 28, 11, 28, 12, 28, 239, 1, 28, 1, 28, 3, 28, 244, 8, 28, 1, 28, 4, 28, 247, 8, 28, 11, 28, 12, 28, 248, 3, 28, 251, 8, 28, 3, 28, 253, 8, 28, 1, 29, 1, 
29, 1, 29, 1, 29, 5, 29, 259, 8, 29, 10, 29, 12, 29, 262, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 269, 8, 29, 10, 29, 12, 29, 272, 9, 29, 1, 29, 3, 29, 275, 8, 29, 1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 296, 8, 33, 10, 33, 12, 33, 299, 9, 33, 1, 34, 4, 34, 302, 8, 34, 11, 34, 12, 34, 303, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 4, 36, 311, 8, 36, 11, 36, 12, 36, 312, 0, 0, 37, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 0, 57, 28, 59, 29, 61, 0, 63, 0, 65, 0, 67, 30, 69, 31, 71, 0, 73, 32, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116, 2, 0, 87, 87, 119, 119, 2, 0, 78, 78, 110, 110, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 79, 79, 111, 111, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 4, 0, 36, 36, 65, 90, 95, 95, 97, 122, 6, 0, 36, 36, 45, 45, 47, 58, 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 336, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 
1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 1, 75, 1, 0, 0, 0, 3, 77, 1, 0, 0, 0, 5, 79, 1, 0, 0, 0, 7, 81, 1, 0, 0, 0, 9, 83, 1, 0, 0, 0, 11, 88, 1, 0, 0, 0, 13, 90, 1, 0, 0, 0, 15, 93, 1, 0, 0, 0, 17, 96, 1, 0, 0, 0, 19, 98, 1, 0, 0, 0, 21, 101, 1, 0, 0, 0, 23, 103, 1, 0, 0, 0, 25, 106, 1, 0, 0, 0, 27, 111, 1, 0, 0, 0, 29, 117, 1, 0, 0, 0, 31, 125, 1, 0, 0, 0, 33, 133, 1, 0, 0, 0, 35, 140, 1, 0, 0, 0, 37, 150, 1, 0, 0, 0, 39, 153, 1, 0, 0, 0, 41, 157, 1, 0, 0, 0, 43, 161, 1, 0, 0, 0, 45, 164, 1, 0, 0, 0, 47, 173, 1, 0, 0, 0, 49, 177, 1, 0, 0, 0, 51, 184, 1, 0, 0, 0, 53, 200, 1, 0, 0, 0, 55, 202, 1, 0, 0, 0, 57, 252, 1, 0, 0, 0, 59, 274, 1, 0, 0, 0, 61, 276, 1, 0, 0, 0, 63, 283, 1, 0, 0, 0, 65, 286, 1, 0, 0, 0, 67, 290, 1, 0, 0, 0, 69, 301, 1, 0, 0, 0, 71, 307, 1, 0, 0, 0, 73, 310, 1, 0, 0, 0, 75, 76, 5, 40, 0, 0, 76, 2, 1, 0, 0, 0, 77, 78, 5, 41, 0, 0, 78, 4, 1, 0, 0, 0, 79, 80, 5, 91, 0, 0, 80, 6, 1, 0, 0, 0, 81, 82, 5, 93, 0, 0, 82, 8, 1, 0, 0, 0, 83, 84, 5, 44, 0, 0, 84, 10, 1, 0, 0, 0, 85, 89, 5, 61, 0, 0, 86, 87, 5, 61, 0, 0, 87, 89, 5, 61, 0, 0, 88, 85, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, 89, 12, 1, 0, 0, 0, 90, 91, 5, 33, 0, 0, 91, 92, 5, 61, 0, 0, 92, 14, 1, 0, 0, 0, 93, 94, 5, 60, 0, 0, 94, 95, 5, 62, 0, 0, 95, 16, 1, 0, 0, 0, 96, 97, 5, 60, 0, 0, 97, 18, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 100, 5, 61, 0, 0, 100, 20, 1, 0, 0, 0, 101, 102, 5, 62, 0, 0, 102, 22, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104, 105, 5, 61, 0, 0, 105, 24, 1, 0, 0, 0, 106, 107, 7, 0, 0, 0, 107, 108, 7, 1, 0, 0, 108, 109, 7, 2, 0, 0, 109, 110, 7, 3, 0, 0, 110, 26, 1, 0, 0, 0, 111, 112, 7, 1, 0, 0, 112, 113, 7, 0, 0, 0, 113, 114, 7, 1, 0, 0, 114, 115, 7, 2, 0, 0, 115, 116, 7, 3, 0, 0, 116, 28, 1, 0, 0, 0, 117, 118, 7, 4, 0, 0, 118, 119, 7, 3, 0, 0, 119, 120, 7, 5, 0, 0, 120, 121, 7, 6, 0, 0, 121, 122, 7, 3, 0, 0, 122, 123, 7, 3, 0, 0, 123, 124, 7, 7, 0, 0, 124, 30, 1, 0, 
0, 0, 125, 126, 7, 3, 0, 0, 126, 127, 7, 8, 0, 0, 127, 128, 7, 1, 0, 0, 128, 129, 7, 9, 0, 0, 129, 131, 7, 5, 0, 0, 130, 132, 7, 9, 0, 0, 131, 130, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 32, 1, 0, 0, 0, 133, 134, 7, 10, 0, 0, 134, 135, 7, 3, 0, 0, 135, 136, 7, 11, 0, 0, 136, 137, 7, 3, 0, 0, 137, 138, 7, 8, 0, 0, 138, 139, 7, 12, 0, 0, 139, 34, 1, 0, 0, 0, 140, 141, 7, 13, 0, 0, 141, 142, 7, 14, 0, 0, 142, 143, 7, 7, 0, 0, 143, 144, 7, 5, 0, 0, 144, 145, 7, 15, 0, 0, 145, 146, 7, 1, 0, 0, 146, 148, 7, 7, 0, 0, 147, 149, 7, 9, 0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 36, 1, 0, 0, 0, 150, 151, 7, 1, 0, 0, 151, 152, 7, 7, 0, 0, 152, 38, 1, 0, 0, 0, 153, 154, 7, 7, 0, 0, 154, 155, 7, 14, 0, 0, 155, 156, 7, 5, 0, 0, 156, 40, 1, 0, 0, 0, 157, 158, 7, 15, 0, 0, 158, 159, 7, 7, 0, 0, 159, 160, 7, 16, 0, 0, 160, 42, 1, 0, 0, 0, 161, 162, 7, 14, 0, 0, 162, 163, 7, 10, 0, 0, 163, 44, 1, 0, 0, 0, 164, 165, 7, 17, 0, 0, 165, 166, 7, 15, 0, 0, 166, 167, 7, 9, 0, 0, 167, 168, 7, 5, 0, 0, 168, 169, 7, 14, 0, 0, 169, 170, 7, 2, 0, 0, 170, 171, 7, 3, 0, 0, 171, 172, 7, 7, 0, 0, 172, 46, 1, 0, 0, 0, 173, 174, 7, 17, 0, 0, 174, 175, 7, 15, 0, 0, 175, 176, 7, 9, 0, 0, 176, 48, 1, 0, 0, 0, 177, 178, 7, 17, 0, 0, 178, 179, 7, 15, 0, 0, 179, 180, 7, 9, 0, 0, 180, 181, 7, 15, 0, 0, 181, 182, 7, 7, 0, 0, 182, 183, 7, 18, 0, 0, 183, 50, 1, 0, 0, 0, 184, 185, 7, 17, 0, 0, 185, 186, 7, 15, 0, 0, 186, 187, 7, 9, 0, 0, 187, 188, 7, 15, 0, 0, 188, 189, 7, 0, 0, 0, 189, 190, 7, 0, 0, 0, 190, 52, 1, 0, 0, 0, 191, 192, 7, 5, 0, 0, 192, 193, 7, 10, 0, 0, 193, 194, 7, 19, 0, 0, 194, 201, 7, 3, 0, 0, 195, 196, 7, 20, 0, 0, 196, 197, 7, 15, 0, 0, 197, 198, 7, 0, 0, 0, 198, 199, 7, 9, 0, 0, 199, 201, 7, 3, 0, 0, 200, 191, 1, 0, 0, 0, 200, 195, 1, 0, 0, 0, 201, 54, 1, 0, 0, 0, 202, 203, 7, 21, 0, 0, 203, 56, 1, 0, 0, 0, 204, 206, 3, 55, 27, 0, 205, 204, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 208, 1, 0, 0, 0, 207, 209, 3, 71, 35, 0, 208, 207, 1, 0, 0, 0, 209, 210, 1, 0, 0, 0, 210, 208, 1, 0, 
0, 0, 210, 211, 1, 0, 0, 0, 211, 219, 1, 0, 0, 0, 212, 216, 5, 46, 0, 0, 213, 215, 3, 71, 35, 0, 214, 213, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 220, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 212, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 230, 1, 0, 0, 0, 221, 223, 7, 3, 0, 0, 222, 224, 3, 55, 27, 0, 223, 222, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 1, 0, 0, 0, 225, 227, 3, 71, 35, 0, 226, 225, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 231, 1, 0, 0, 0, 230, 221, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 253, 1, 0, 0, 0, 232, 234, 3, 55, 27, 0, 233, 232, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 237, 5, 46, 0, 0, 236, 238, 3, 71, 35, 0, 237, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 55, 27, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 71, 35, 0, 246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 253, 1, 0, 0, 0, 252, 205, 1, 0, 0, 0, 252, 233, 1, 0, 0, 0, 253, 58, 1, 0, 0, 0, 254, 260, 5, 34, 0, 0, 255, 259, 8, 22, 0, 0, 256, 257, 5, 92, 0, 0, 257, 259, 9, 0, 0, 0, 258, 255, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 263, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 275, 5, 34, 0, 0, 264, 270, 5, 39, 0, 0, 265, 269, 8, 23, 0, 0, 266, 267, 5, 92, 0, 0, 267, 269, 9, 0, 0, 0, 268, 265, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 39, 0, 0, 274, 254, 1, 0, 0, 0, 274, 264, 1, 0, 0, 0, 275, 60, 1, 0, 0, 0, 276, 280, 7, 24, 0, 0, 277, 279, 7, 25, 0, 0, 278, 277, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 62, 1, 0, 0, 0, 282, 280, 1, 
0, 0, 0, 283, 284, 5, 91, 0, 0, 284, 285, 5, 93, 0, 0, 285, 64, 1, 0, 0, 0, 286, 287, 5, 91, 0, 0, 287, 288, 5, 42, 0, 0, 288, 289, 5, 93, 0, 0, 289, 66, 1, 0, 0, 0, 290, 297, 3, 61, 30, 0, 291, 292, 5, 46, 0, 0, 292, 296, 3, 61, 30, 0, 293, 296, 3, 63, 31, 0, 294, 296, 3, 65, 32, 0, 295, 291, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 295, 294, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 68, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 300, 302, 7, 26, 0, 0, 301, 300, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306, 6, 34, 0, 0, 306, 70, 1, 0, 0, 0, 307, 308, 7, 27, 0, 0, 308, 72, 1, 0, 0, 0, 309, 311, 8, 28, 0, 0, 310, 309, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 74, 1, 0, 0, 0, 28, 0, 88, 131, 148, 200, 205, 210, 216, 219, 223, 228, 230, 233, 239, 243, 248, 250, 252, 258, 260, 268, 270, 274, 280, 295, 297, 303, 312, 1, 6, 0, 0] \ No newline at end of file diff --git a/frontend/src/parser/FilterQueryLexer.tokens b/frontend/src/parser/FilterQueryLexer.tokens index 4df881075f0a..615858f49676 100644 --- a/frontend/src/parser/FilterQueryLexer.tokens +++ b/frontend/src/parser/FilterQueryLexer.tokens @@ -20,15 +20,16 @@ IN=19 NOT=20 AND=21 OR=22 -HAS=23 -HASANY=24 -HASALL=25 -BOOL=26 -NUMBER=27 -QUOTED_TEXT=28 -KEY=29 -WS=30 -FREETEXT=31 +HASTOKEN=23 +HAS=24 +HASANY=25 +HASALL=26 +BOOL=27 +NUMBER=28 +QUOTED_TEXT=29 +KEY=30 +WS=31 +FREETEXT=32 '('=1 ')'=2 '['=3 diff --git a/frontend/src/parser/FilterQueryLexer.ts b/frontend/src/parser/FilterQueryLexer.ts index ce26b2ff7ced..fe4d66029230 100644 --- a/frontend/src/parser/FilterQueryLexer.ts +++ b/frontend/src/parser/FilterQueryLexer.ts @@ -1,4 +1,4 @@ -// Generated from FilterQuery.g4 by ANTLR 4.13.1 +// Generated from ../../../../grammar/FilterQuery.g4 by ANTLR 4.13.1 // noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols import { ATN, @@ -34,15 +34,16 @@ export default class 
FilterQueryLexer extends Lexer { public static readonly NOT = 20; public static readonly AND = 21; public static readonly OR = 22; - public static readonly HAS = 23; - public static readonly HASANY = 24; - public static readonly HASALL = 25; - public static readonly BOOL = 26; - public static readonly NUMBER = 27; - public static readonly QUOTED_TEXT = 28; - public static readonly KEY = 29; - public static readonly WS = 30; - public static readonly FREETEXT = 31; + public static readonly HASTOKEN = 23; + public static readonly HAS = 24; + public static readonly HASANY = 25; + public static readonly HASALL = 26; + public static readonly BOOL = 27; + public static readonly NUMBER = 28; + public static readonly QUOTED_TEXT = 29; + public static readonly KEY = 30; + public static readonly WS = 31; + public static readonly FREETEXT = 32; public static readonly EOF = Token.EOF; public static readonly channelNames: string[] = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ]; @@ -65,6 +66,7 @@ export default class FilterQueryLexer extends Lexer { "CONTAINS", "IN", "NOT", "AND", "OR", + "HASTOKEN", "HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", @@ -75,8 +77,8 @@ export default class FilterQueryLexer extends Lexer { public static readonly ruleNames: string[] = [ "LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS", "NEQ", "LT", "LE", "GT", "GE", "LIKE", "ILIKE", "BETWEEN", "EXISTS", "REGEXP", - "CONTAINS", "IN", "NOT", "AND", "OR", "HAS", "HASANY", "HASALL", "BOOL", - "SIGN", "NUMBER", "QUOTED_TEXT", "SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS", + "CONTAINS", "IN", "NOT", "AND", "OR", "HASTOKEN", "HAS", "HASANY", "HASALL", + "BOOL", "SIGN", "NUMBER", "QUOTED_TEXT", "SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS", "KEY", "WS", "DIGIT", "FREETEXT", ]; @@ -98,113 +100,116 @@ export default class FilterQueryLexer extends Lexer { public get modeNames(): string[] { return FilterQueryLexer.modeNames; } - public static readonly _serializedATN: number[] = 
[4,0,31,303,6,-1,2,0, + public static readonly _serializedATN: number[] = [4,0,32,314,6,-1,2,0, 7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2,7,7,7,2,8,7,8,2,9, 7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13,2,14,7,14,2,15,7,15,2,16,7, 16,2,17,7,17,2,18,7,18,2,19,7,19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,23, 2,24,7,24,2,25,7,25,2,26,7,26,2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,2, - 31,7,31,2,32,7,32,2,33,7,33,2,34,7,34,2,35,7,35,1,0,1,0,1,1,1,1,1,2,1,2, - 1,3,1,3,1,4,1,4,1,5,1,5,1,5,3,5,87,8,5,1,6,1,6,1,6,1,7,1,7,1,7,1,8,1,8, - 1,9,1,9,1,9,1,10,1,10,1,11,1,11,1,11,1,12,1,12,1,12,1,12,1,12,1,13,1,13, - 1,13,1,13,1,13,1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,15,1,15,1, - 15,1,15,1,15,1,15,3,15,130,8,15,1,16,1,16,1,16,1,16,1,16,1,16,1,16,1,17, - 1,17,1,17,1,17,1,17,1,17,1,17,1,17,3,17,147,8,17,1,18,1,18,1,18,1,19,1, - 19,1,19,1,19,1,20,1,20,1,20,1,20,1,21,1,21,1,21,1,22,1,22,1,22,1,22,1,23, - 1,23,1,23,1,23,1,23,1,23,1,23,1,24,1,24,1,24,1,24,1,24,1,24,1,24,1,25,1, - 25,1,25,1,25,1,25,1,25,1,25,1,25,1,25,3,25,190,8,25,1,26,1,26,1,27,3,27, - 195,8,27,1,27,4,27,198,8,27,11,27,12,27,199,1,27,1,27,5,27,204,8,27,10, - 27,12,27,207,9,27,3,27,209,8,27,1,27,1,27,3,27,213,8,27,1,27,4,27,216,8, - 27,11,27,12,27,217,3,27,220,8,27,1,27,3,27,223,8,27,1,27,1,27,4,27,227, - 8,27,11,27,12,27,228,1,27,1,27,3,27,233,8,27,1,27,4,27,236,8,27,11,27,12, - 27,237,3,27,240,8,27,3,27,242,8,27,1,28,1,28,1,28,1,28,5,28,248,8,28,10, - 28,12,28,251,9,28,1,28,1,28,1,28,1,28,1,28,5,28,258,8,28,10,28,12,28,261, - 9,28,1,28,3,28,264,8,28,1,29,1,29,5,29,268,8,29,10,29,12,29,271,9,29,1, - 30,1,30,1,30,1,31,1,31,1,31,1,31,1,32,1,32,1,32,1,32,1,32,5,32,285,8,32, - 10,32,12,32,288,9,32,1,33,4,33,291,8,33,11,33,12,33,292,1,33,1,33,1,34, - 1,34,1,35,4,35,300,8,35,11,35,12,35,301,0,0,36,1,1,3,2,5,3,7,4,9,5,11,6, - 13,7,15,8,17,9,19,10,21,11,23,12,25,13,27,14,29,15,31,16,33,17,35,18,37, - 19,39,20,41,21,43,22,45,23,47,24,49,25,51,26,53,0,55,27,57,28,59,0,61,0, - 
63,0,65,29,67,30,69,0,71,31,1,0,29,2,0,76,76,108,108,2,0,73,73,105,105, - 2,0,75,75,107,107,2,0,69,69,101,101,2,0,66,66,98,98,2,0,84,84,116,116,2, - 0,87,87,119,119,2,0,78,78,110,110,2,0,88,88,120,120,2,0,83,83,115,115,2, - 0,82,82,114,114,2,0,71,71,103,103,2,0,80,80,112,112,2,0,67,67,99,99,2,0, - 79,79,111,111,2,0,65,65,97,97,2,0,68,68,100,100,2,0,72,72,104,104,2,0,89, - 89,121,121,2,0,85,85,117,117,2,0,70,70,102,102,2,0,43,43,45,45,2,0,34,34, - 92,92,2,0,39,39,92,92,4,0,36,36,65,90,95,95,97,122,6,0,36,36,45,45,47,58, - 65,90,95,95,97,122,3,0,9,10,13,13,32,32,1,0,48,57,8,0,9,10,13,13,32,34, - 39,41,44,44,60,62,91,91,93,93,325,0,1,1,0,0,0,0,3,1,0,0,0,0,5,1,0,0,0,0, - 7,1,0,0,0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0, - 0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1,0,0,0,0,25,1,0,0,0,0,27,1,0,0,0,0,29, - 1,0,0,0,0,31,1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,0,0,37,1,0,0,0,0,39,1,0,0, - 0,0,41,1,0,0,0,0,43,1,0,0,0,0,45,1,0,0,0,0,47,1,0,0,0,0,49,1,0,0,0,0,51, - 1,0,0,0,0,55,1,0,0,0,0,57,1,0,0,0,0,65,1,0,0,0,0,67,1,0,0,0,0,71,1,0,0, - 0,1,73,1,0,0,0,3,75,1,0,0,0,5,77,1,0,0,0,7,79,1,0,0,0,9,81,1,0,0,0,11,86, - 1,0,0,0,13,88,1,0,0,0,15,91,1,0,0,0,17,94,1,0,0,0,19,96,1,0,0,0,21,99,1, - 0,0,0,23,101,1,0,0,0,25,104,1,0,0,0,27,109,1,0,0,0,29,115,1,0,0,0,31,123, - 1,0,0,0,33,131,1,0,0,0,35,138,1,0,0,0,37,148,1,0,0,0,39,151,1,0,0,0,41, - 155,1,0,0,0,43,159,1,0,0,0,45,162,1,0,0,0,47,166,1,0,0,0,49,173,1,0,0,0, - 51,189,1,0,0,0,53,191,1,0,0,0,55,241,1,0,0,0,57,263,1,0,0,0,59,265,1,0, - 0,0,61,272,1,0,0,0,63,275,1,0,0,0,65,279,1,0,0,0,67,290,1,0,0,0,69,296, - 1,0,0,0,71,299,1,0,0,0,73,74,5,40,0,0,74,2,1,0,0,0,75,76,5,41,0,0,76,4, - 1,0,0,0,77,78,5,91,0,0,78,6,1,0,0,0,79,80,5,93,0,0,80,8,1,0,0,0,81,82,5, - 44,0,0,82,10,1,0,0,0,83,87,5,61,0,0,84,85,5,61,0,0,85,87,5,61,0,0,86,83, - 1,0,0,0,86,84,1,0,0,0,87,12,1,0,0,0,88,89,5,33,0,0,89,90,5,61,0,0,90,14, - 1,0,0,0,91,92,5,60,0,0,92,93,5,62,0,0,93,16,1,0,0,0,94,95,5,60,0,0,95,18, - 
1,0,0,0,96,97,5,60,0,0,97,98,5,61,0,0,98,20,1,0,0,0,99,100,5,62,0,0,100, - 22,1,0,0,0,101,102,5,62,0,0,102,103,5,61,0,0,103,24,1,0,0,0,104,105,7,0, - 0,0,105,106,7,1,0,0,106,107,7,2,0,0,107,108,7,3,0,0,108,26,1,0,0,0,109, - 110,7,1,0,0,110,111,7,0,0,0,111,112,7,1,0,0,112,113,7,2,0,0,113,114,7,3, - 0,0,114,28,1,0,0,0,115,116,7,4,0,0,116,117,7,3,0,0,117,118,7,5,0,0,118, - 119,7,6,0,0,119,120,7,3,0,0,120,121,7,3,0,0,121,122,7,7,0,0,122,30,1,0, - 0,0,123,124,7,3,0,0,124,125,7,8,0,0,125,126,7,1,0,0,126,127,7,9,0,0,127, - 129,7,5,0,0,128,130,7,9,0,0,129,128,1,0,0,0,129,130,1,0,0,0,130,32,1,0, - 0,0,131,132,7,10,0,0,132,133,7,3,0,0,133,134,7,11,0,0,134,135,7,3,0,0,135, - 136,7,8,0,0,136,137,7,12,0,0,137,34,1,0,0,0,138,139,7,13,0,0,139,140,7, - 14,0,0,140,141,7,7,0,0,141,142,7,5,0,0,142,143,7,15,0,0,143,144,7,1,0,0, - 144,146,7,7,0,0,145,147,7,9,0,0,146,145,1,0,0,0,146,147,1,0,0,0,147,36, - 1,0,0,0,148,149,7,1,0,0,149,150,7,7,0,0,150,38,1,0,0,0,151,152,7,7,0,0, - 152,153,7,14,0,0,153,154,7,5,0,0,154,40,1,0,0,0,155,156,7,15,0,0,156,157, - 7,7,0,0,157,158,7,16,0,0,158,42,1,0,0,0,159,160,7,14,0,0,160,161,7,10,0, - 0,161,44,1,0,0,0,162,163,7,17,0,0,163,164,7,15,0,0,164,165,7,9,0,0,165, - 46,1,0,0,0,166,167,7,17,0,0,167,168,7,15,0,0,168,169,7,9,0,0,169,170,7, - 15,0,0,170,171,7,7,0,0,171,172,7,18,0,0,172,48,1,0,0,0,173,174,7,17,0,0, - 174,175,7,15,0,0,175,176,7,9,0,0,176,177,7,15,0,0,177,178,7,0,0,0,178,179, - 7,0,0,0,179,50,1,0,0,0,180,181,7,5,0,0,181,182,7,10,0,0,182,183,7,19,0, - 0,183,190,7,3,0,0,184,185,7,20,0,0,185,186,7,15,0,0,186,187,7,0,0,0,187, - 188,7,9,0,0,188,190,7,3,0,0,189,180,1,0,0,0,189,184,1,0,0,0,190,52,1,0, - 0,0,191,192,7,21,0,0,192,54,1,0,0,0,193,195,3,53,26,0,194,193,1,0,0,0,194, - 195,1,0,0,0,195,197,1,0,0,0,196,198,3,69,34,0,197,196,1,0,0,0,198,199,1, - 0,0,0,199,197,1,0,0,0,199,200,1,0,0,0,200,208,1,0,0,0,201,205,5,46,0,0, - 202,204,3,69,34,0,203,202,1,0,0,0,204,207,1,0,0,0,205,203,1,0,0,0,205,206, - 
1,0,0,0,206,209,1,0,0,0,207,205,1,0,0,0,208,201,1,0,0,0,208,209,1,0,0,0, - 209,219,1,0,0,0,210,212,7,3,0,0,211,213,3,53,26,0,212,211,1,0,0,0,212,213, - 1,0,0,0,213,215,1,0,0,0,214,216,3,69,34,0,215,214,1,0,0,0,216,217,1,0,0, - 0,217,215,1,0,0,0,217,218,1,0,0,0,218,220,1,0,0,0,219,210,1,0,0,0,219,220, - 1,0,0,0,220,242,1,0,0,0,221,223,3,53,26,0,222,221,1,0,0,0,222,223,1,0,0, - 0,223,224,1,0,0,0,224,226,5,46,0,0,225,227,3,69,34,0,226,225,1,0,0,0,227, - 228,1,0,0,0,228,226,1,0,0,0,228,229,1,0,0,0,229,239,1,0,0,0,230,232,7,3, - 0,0,231,233,3,53,26,0,232,231,1,0,0,0,232,233,1,0,0,0,233,235,1,0,0,0,234, - 236,3,69,34,0,235,234,1,0,0,0,236,237,1,0,0,0,237,235,1,0,0,0,237,238,1, - 0,0,0,238,240,1,0,0,0,239,230,1,0,0,0,239,240,1,0,0,0,240,242,1,0,0,0,241, - 194,1,0,0,0,241,222,1,0,0,0,242,56,1,0,0,0,243,249,5,34,0,0,244,248,8,22, - 0,0,245,246,5,92,0,0,246,248,9,0,0,0,247,244,1,0,0,0,247,245,1,0,0,0,248, - 251,1,0,0,0,249,247,1,0,0,0,249,250,1,0,0,0,250,252,1,0,0,0,251,249,1,0, - 0,0,252,264,5,34,0,0,253,259,5,39,0,0,254,258,8,23,0,0,255,256,5,92,0,0, - 256,258,9,0,0,0,257,254,1,0,0,0,257,255,1,0,0,0,258,261,1,0,0,0,259,257, - 1,0,0,0,259,260,1,0,0,0,260,262,1,0,0,0,261,259,1,0,0,0,262,264,5,39,0, - 0,263,243,1,0,0,0,263,253,1,0,0,0,264,58,1,0,0,0,265,269,7,24,0,0,266,268, - 7,25,0,0,267,266,1,0,0,0,268,271,1,0,0,0,269,267,1,0,0,0,269,270,1,0,0, - 0,270,60,1,0,0,0,271,269,1,0,0,0,272,273,5,91,0,0,273,274,5,93,0,0,274, - 62,1,0,0,0,275,276,5,91,0,0,276,277,5,42,0,0,277,278,5,93,0,0,278,64,1, - 0,0,0,279,286,3,59,29,0,280,281,5,46,0,0,281,285,3,59,29,0,282,285,3,61, - 30,0,283,285,3,63,31,0,284,280,1,0,0,0,284,282,1,0,0,0,284,283,1,0,0,0, - 285,288,1,0,0,0,286,284,1,0,0,0,286,287,1,0,0,0,287,66,1,0,0,0,288,286, - 1,0,0,0,289,291,7,26,0,0,290,289,1,0,0,0,291,292,1,0,0,0,292,290,1,0,0, - 0,292,293,1,0,0,0,293,294,1,0,0,0,294,295,6,33,0,0,295,68,1,0,0,0,296,297, - 7,27,0,0,297,70,1,0,0,0,298,300,8,28,0,0,299,298,1,0,0,0,300,301,1,0,0, - 
0,301,299,1,0,0,0,301,302,1,0,0,0,302,72,1,0,0,0,28,0,86,129,146,189,194, - 199,205,208,212,217,219,222,228,232,237,239,241,247,249,257,259,263,269, - 284,286,292,301,1,6,0,0]; + 31,7,31,2,32,7,32,2,33,7,33,2,34,7,34,2,35,7,35,2,36,7,36,1,0,1,0,1,1,1, + 1,1,2,1,2,1,3,1,3,1,4,1,4,1,5,1,5,1,5,3,5,89,8,5,1,6,1,6,1,6,1,7,1,7,1, + 7,1,8,1,8,1,9,1,9,1,9,1,10,1,10,1,11,1,11,1,11,1,12,1,12,1,12,1,12,1,12, + 1,13,1,13,1,13,1,13,1,13,1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1, + 15,1,15,1,15,1,15,1,15,1,15,3,15,132,8,15,1,16,1,16,1,16,1,16,1,16,1,16, + 1,16,1,17,1,17,1,17,1,17,1,17,1,17,1,17,1,17,3,17,149,8,17,1,18,1,18,1, + 18,1,19,1,19,1,19,1,19,1,20,1,20,1,20,1,20,1,21,1,21,1,21,1,22,1,22,1,22, + 1,22,1,22,1,22,1,22,1,22,1,22,1,23,1,23,1,23,1,23,1,24,1,24,1,24,1,24,1, + 24,1,24,1,24,1,25,1,25,1,25,1,25,1,25,1,25,1,25,1,26,1,26,1,26,1,26,1,26, + 1,26,1,26,1,26,1,26,3,26,201,8,26,1,27,1,27,1,28,3,28,206,8,28,1,28,4,28, + 209,8,28,11,28,12,28,210,1,28,1,28,5,28,215,8,28,10,28,12,28,218,9,28,3, + 28,220,8,28,1,28,1,28,3,28,224,8,28,1,28,4,28,227,8,28,11,28,12,28,228, + 3,28,231,8,28,1,28,3,28,234,8,28,1,28,1,28,4,28,238,8,28,11,28,12,28,239, + 1,28,1,28,3,28,244,8,28,1,28,4,28,247,8,28,11,28,12,28,248,3,28,251,8,28, + 3,28,253,8,28,1,29,1,29,1,29,1,29,5,29,259,8,29,10,29,12,29,262,9,29,1, + 29,1,29,1,29,1,29,1,29,5,29,269,8,29,10,29,12,29,272,9,29,1,29,3,29,275, + 8,29,1,30,1,30,5,30,279,8,30,10,30,12,30,282,9,30,1,31,1,31,1,31,1,32,1, + 32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,5,33,296,8,33,10,33,12,33,299,9,33, + 1,34,4,34,302,8,34,11,34,12,34,303,1,34,1,34,1,35,1,35,1,36,4,36,311,8, + 36,11,36,12,36,312,0,0,37,1,1,3,2,5,3,7,4,9,5,11,6,13,7,15,8,17,9,19,10, + 21,11,23,12,25,13,27,14,29,15,31,16,33,17,35,18,37,19,39,20,41,21,43,22, + 45,23,47,24,49,25,51,26,53,27,55,0,57,28,59,29,61,0,63,0,65,0,67,30,69, + 31,71,0,73,32,1,0,29,2,0,76,76,108,108,2,0,73,73,105,105,2,0,75,75,107, + 107,2,0,69,69,101,101,2,0,66,66,98,98,2,0,84,84,116,116,2,0,87,87,119,119, + 
2,0,78,78,110,110,2,0,88,88,120,120,2,0,83,83,115,115,2,0,82,82,114,114, + 2,0,71,71,103,103,2,0,80,80,112,112,2,0,67,67,99,99,2,0,79,79,111,111,2, + 0,65,65,97,97,2,0,68,68,100,100,2,0,72,72,104,104,2,0,89,89,121,121,2,0, + 85,85,117,117,2,0,70,70,102,102,2,0,43,43,45,45,2,0,34,34,92,92,2,0,39, + 39,92,92,4,0,36,36,65,90,95,95,97,122,6,0,36,36,45,45,47,58,65,90,95,95, + 97,122,3,0,9,10,13,13,32,32,1,0,48,57,8,0,9,10,13,13,32,34,39,41,44,44, + 60,62,91,91,93,93,336,0,1,1,0,0,0,0,3,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0, + 9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0,0,0,19,1,0, + 0,0,0,21,1,0,0,0,0,23,1,0,0,0,0,25,1,0,0,0,0,27,1,0,0,0,0,29,1,0,0,0,0, + 31,1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,0,0,37,1,0,0,0,0,39,1,0,0,0,0,41,1,0, + 0,0,0,43,1,0,0,0,0,45,1,0,0,0,0,47,1,0,0,0,0,49,1,0,0,0,0,51,1,0,0,0,0, + 53,1,0,0,0,0,57,1,0,0,0,0,59,1,0,0,0,0,67,1,0,0,0,0,69,1,0,0,0,0,73,1,0, + 0,0,1,75,1,0,0,0,3,77,1,0,0,0,5,79,1,0,0,0,7,81,1,0,0,0,9,83,1,0,0,0,11, + 88,1,0,0,0,13,90,1,0,0,0,15,93,1,0,0,0,17,96,1,0,0,0,19,98,1,0,0,0,21,101, + 1,0,0,0,23,103,1,0,0,0,25,106,1,0,0,0,27,111,1,0,0,0,29,117,1,0,0,0,31, + 125,1,0,0,0,33,133,1,0,0,0,35,140,1,0,0,0,37,150,1,0,0,0,39,153,1,0,0,0, + 41,157,1,0,0,0,43,161,1,0,0,0,45,164,1,0,0,0,47,173,1,0,0,0,49,177,1,0, + 0,0,51,184,1,0,0,0,53,200,1,0,0,0,55,202,1,0,0,0,57,252,1,0,0,0,59,274, + 1,0,0,0,61,276,1,0,0,0,63,283,1,0,0,0,65,286,1,0,0,0,67,290,1,0,0,0,69, + 301,1,0,0,0,71,307,1,0,0,0,73,310,1,0,0,0,75,76,5,40,0,0,76,2,1,0,0,0,77, + 78,5,41,0,0,78,4,1,0,0,0,79,80,5,91,0,0,80,6,1,0,0,0,81,82,5,93,0,0,82, + 8,1,0,0,0,83,84,5,44,0,0,84,10,1,0,0,0,85,89,5,61,0,0,86,87,5,61,0,0,87, + 89,5,61,0,0,88,85,1,0,0,0,88,86,1,0,0,0,89,12,1,0,0,0,90,91,5,33,0,0,91, + 92,5,61,0,0,92,14,1,0,0,0,93,94,5,60,0,0,94,95,5,62,0,0,95,16,1,0,0,0,96, + 97,5,60,0,0,97,18,1,0,0,0,98,99,5,60,0,0,99,100,5,61,0,0,100,20,1,0,0,0, + 101,102,5,62,0,0,102,22,1,0,0,0,103,104,5,62,0,0,104,105,5,61,0,0,105,24, + 
1,0,0,0,106,107,7,0,0,0,107,108,7,1,0,0,108,109,7,2,0,0,109,110,7,3,0,0, + 110,26,1,0,0,0,111,112,7,1,0,0,112,113,7,0,0,0,113,114,7,1,0,0,114,115, + 7,2,0,0,115,116,7,3,0,0,116,28,1,0,0,0,117,118,7,4,0,0,118,119,7,3,0,0, + 119,120,7,5,0,0,120,121,7,6,0,0,121,122,7,3,0,0,122,123,7,3,0,0,123,124, + 7,7,0,0,124,30,1,0,0,0,125,126,7,3,0,0,126,127,7,8,0,0,127,128,7,1,0,0, + 128,129,7,9,0,0,129,131,7,5,0,0,130,132,7,9,0,0,131,130,1,0,0,0,131,132, + 1,0,0,0,132,32,1,0,0,0,133,134,7,10,0,0,134,135,7,3,0,0,135,136,7,11,0, + 0,136,137,7,3,0,0,137,138,7,8,0,0,138,139,7,12,0,0,139,34,1,0,0,0,140,141, + 7,13,0,0,141,142,7,14,0,0,142,143,7,7,0,0,143,144,7,5,0,0,144,145,7,15, + 0,0,145,146,7,1,0,0,146,148,7,7,0,0,147,149,7,9,0,0,148,147,1,0,0,0,148, + 149,1,0,0,0,149,36,1,0,0,0,150,151,7,1,0,0,151,152,7,7,0,0,152,38,1,0,0, + 0,153,154,7,7,0,0,154,155,7,14,0,0,155,156,7,5,0,0,156,40,1,0,0,0,157,158, + 7,15,0,0,158,159,7,7,0,0,159,160,7,16,0,0,160,42,1,0,0,0,161,162,7,14,0, + 0,162,163,7,10,0,0,163,44,1,0,0,0,164,165,7,17,0,0,165,166,7,15,0,0,166, + 167,7,9,0,0,167,168,7,5,0,0,168,169,7,14,0,0,169,170,7,2,0,0,170,171,7, + 3,0,0,171,172,7,7,0,0,172,46,1,0,0,0,173,174,7,17,0,0,174,175,7,15,0,0, + 175,176,7,9,0,0,176,48,1,0,0,0,177,178,7,17,0,0,178,179,7,15,0,0,179,180, + 7,9,0,0,180,181,7,15,0,0,181,182,7,7,0,0,182,183,7,18,0,0,183,50,1,0,0, + 0,184,185,7,17,0,0,185,186,7,15,0,0,186,187,7,9,0,0,187,188,7,15,0,0,188, + 189,7,0,0,0,189,190,7,0,0,0,190,52,1,0,0,0,191,192,7,5,0,0,192,193,7,10, + 0,0,193,194,7,19,0,0,194,201,7,3,0,0,195,196,7,20,0,0,196,197,7,15,0,0, + 197,198,7,0,0,0,198,199,7,9,0,0,199,201,7,3,0,0,200,191,1,0,0,0,200,195, + 1,0,0,0,201,54,1,0,0,0,202,203,7,21,0,0,203,56,1,0,0,0,204,206,3,55,27, + 0,205,204,1,0,0,0,205,206,1,0,0,0,206,208,1,0,0,0,207,209,3,71,35,0,208, + 207,1,0,0,0,209,210,1,0,0,0,210,208,1,0,0,0,210,211,1,0,0,0,211,219,1,0, + 0,0,212,216,5,46,0,0,213,215,3,71,35,0,214,213,1,0,0,0,215,218,1,0,0,0, + 
216,214,1,0,0,0,216,217,1,0,0,0,217,220,1,0,0,0,218,216,1,0,0,0,219,212, + 1,0,0,0,219,220,1,0,0,0,220,230,1,0,0,0,221,223,7,3,0,0,222,224,3,55,27, + 0,223,222,1,0,0,0,223,224,1,0,0,0,224,226,1,0,0,0,225,227,3,71,35,0,226, + 225,1,0,0,0,227,228,1,0,0,0,228,226,1,0,0,0,228,229,1,0,0,0,229,231,1,0, + 0,0,230,221,1,0,0,0,230,231,1,0,0,0,231,253,1,0,0,0,232,234,3,55,27,0,233, + 232,1,0,0,0,233,234,1,0,0,0,234,235,1,0,0,0,235,237,5,46,0,0,236,238,3, + 71,35,0,237,236,1,0,0,0,238,239,1,0,0,0,239,237,1,0,0,0,239,240,1,0,0,0, + 240,250,1,0,0,0,241,243,7,3,0,0,242,244,3,55,27,0,243,242,1,0,0,0,243,244, + 1,0,0,0,244,246,1,0,0,0,245,247,3,71,35,0,246,245,1,0,0,0,247,248,1,0,0, + 0,248,246,1,0,0,0,248,249,1,0,0,0,249,251,1,0,0,0,250,241,1,0,0,0,250,251, + 1,0,0,0,251,253,1,0,0,0,252,205,1,0,0,0,252,233,1,0,0,0,253,58,1,0,0,0, + 254,260,5,34,0,0,255,259,8,22,0,0,256,257,5,92,0,0,257,259,9,0,0,0,258, + 255,1,0,0,0,258,256,1,0,0,0,259,262,1,0,0,0,260,258,1,0,0,0,260,261,1,0, + 0,0,261,263,1,0,0,0,262,260,1,0,0,0,263,275,5,34,0,0,264,270,5,39,0,0,265, + 269,8,23,0,0,266,267,5,92,0,0,267,269,9,0,0,0,268,265,1,0,0,0,268,266,1, + 0,0,0,269,272,1,0,0,0,270,268,1,0,0,0,270,271,1,0,0,0,271,273,1,0,0,0,272, + 270,1,0,0,0,273,275,5,39,0,0,274,254,1,0,0,0,274,264,1,0,0,0,275,60,1,0, + 0,0,276,280,7,24,0,0,277,279,7,25,0,0,278,277,1,0,0,0,279,282,1,0,0,0,280, + 278,1,0,0,0,280,281,1,0,0,0,281,62,1,0,0,0,282,280,1,0,0,0,283,284,5,91, + 0,0,284,285,5,93,0,0,285,64,1,0,0,0,286,287,5,91,0,0,287,288,5,42,0,0,288, + 289,5,93,0,0,289,66,1,0,0,0,290,297,3,61,30,0,291,292,5,46,0,0,292,296, + 3,61,30,0,293,296,3,63,31,0,294,296,3,65,32,0,295,291,1,0,0,0,295,293,1, + 0,0,0,295,294,1,0,0,0,296,299,1,0,0,0,297,295,1,0,0,0,297,298,1,0,0,0,298, + 68,1,0,0,0,299,297,1,0,0,0,300,302,7,26,0,0,301,300,1,0,0,0,302,303,1,0, + 0,0,303,301,1,0,0,0,303,304,1,0,0,0,304,305,1,0,0,0,305,306,6,34,0,0,306, + 70,1,0,0,0,307,308,7,27,0,0,308,72,1,0,0,0,309,311,8,28,0,0,310,309,1,0, + 
0,0,311,312,1,0,0,0,312,310,1,0,0,0,312,313,1,0,0,0,313,74,1,0,0,0,28,0, + 88,131,148,200,205,210,216,219,223,228,230,233,239,243,248,250,252,258, + 260,268,270,274,280,295,297,303,312,1,6,0,0]; private static __ATN: ATN; public static get _ATN(): ATN { diff --git a/frontend/src/parser/FilterQueryListener.ts b/frontend/src/parser/FilterQueryListener.ts index 661f04157b17..a05a158de3a9 100644 --- a/frontend/src/parser/FilterQueryListener.ts +++ b/frontend/src/parser/FilterQueryListener.ts @@ -1,4 +1,4 @@ -// Generated from FilterQuery.g4 by ANTLR 4.13.1 +// Generated from ../../../../grammar/FilterQuery.g4 by ANTLR 4.13.1 import {ParseTreeListener} from "antlr4"; @@ -198,3 +198,4 @@ export default class FilterQueryListener extends ParseTreeListener { */ exitKey?: (ctx: KeyContext) => void; } + diff --git a/frontend/src/parser/FilterQueryParser.ts b/frontend/src/parser/FilterQueryParser.ts index 70d9142bb768..d9c11a1646f9 100644 --- a/frontend/src/parser/FilterQueryParser.ts +++ b/frontend/src/parser/FilterQueryParser.ts @@ -1,4 +1,4 @@ -// Generated from FilterQuery.g4 by ANTLR 4.13.1 +// Generated from ../../../../grammar/FilterQuery.g4 by ANTLR 4.13.1 // noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols import { @@ -41,15 +41,16 @@ export default class FilterQueryParser extends Parser { public static readonly NOT = 20; public static readonly AND = 21; public static readonly OR = 22; - public static readonly HAS = 23; - public static readonly HASANY = 24; - public static readonly HASALL = 25; - public static readonly BOOL = 26; - public static readonly NUMBER = 27; - public static readonly QUOTED_TEXT = 28; - public static readonly KEY = 29; - public static readonly WS = 30; - public static readonly FREETEXT = 31; + public static readonly HASTOKEN = 23; + public static readonly HAS = 24; + public static readonly HASANY = 25; + public static readonly HASALL = 26; + public static readonly BOOL = 27; + public static readonly NUMBER = 28; + 
public static readonly QUOTED_TEXT = 29; + public static readonly KEY = 30; + public static readonly WS = 31; + public static readonly FREETEXT = 32; public static readonly EOF = Token.EOF; public static readonly RULE_query = 0; public static readonly RULE_expression = 1; @@ -87,6 +88,7 @@ export default class FilterQueryParser extends Parser { "CONTAINS", "IN", "NOT", "AND", "OR", + "HASTOKEN", "HAS", "HASANY", "HASALL", "BOOL", "NUMBER", "QUOTED_TEXT", @@ -220,7 +222,7 @@ export default class FilterQueryParser extends Parser { this.state = 53; this._errHandler.sync(this); _la = this._input.LA(1); - while ((((_la) & ~0x1F) === 0 && ((1 << _la) & 3215982594) !== 0)) { + while (((((_la - 1)) & ~0x1F) === 0 && ((1 << (_la - 1)) & 3218604033) !== 0)) { { this.state = 51; this._errHandler.sync(this); @@ -242,7 +244,8 @@ export default class FilterQueryParser extends Parser { case 27: case 28: case 29: - case 31: + case 30: + case 32: { this.state = 50; this.unaryExpression(); @@ -808,7 +811,7 @@ export default class FilterQueryParser extends Parser { { this.state = 190; _la = this._input.LA(1); - if(!(_la===28 || _la===31)) { + if(!(_la===29 || _la===32)) { this._errHandler.recoverInline(this); } else { @@ -841,7 +844,7 @@ export default class FilterQueryParser extends Parser { { this.state = 192; _la = this._input.LA(1); - if(!((((_la) & ~0x1F) === 0 && ((1 << _la) & 58720256) !== 0))) { + if(!((((_la) & ~0x1F) === 0 && ((1 << _la) & 125829120) !== 0))) { this._errHandler.recoverInline(this); } else { @@ -996,7 +999,7 @@ export default class FilterQueryParser extends Parser { { this.state = 214; _la = this._input.LA(1); - if(!((((_la) & ~0x1F) === 0 && ((1 << _la) & 1006632960) !== 0))) { + if(!((((_la) & ~0x1F) === 0 && ((1 << _la) & 2013265920) !== 0))) { this._errHandler.recoverInline(this); } else { @@ -1045,7 +1048,7 @@ export default class FilterQueryParser extends Parser { return localctx; } - public static readonly _serializedATN: number[] = 
[4,1,31,219,2,0,7,0,2, + public static readonly _serializedATN: number[] = [4,1,32,219,2,0,7,0,2, 1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2,7,7,7,2,8,7,8,2,9,7,9,2, 10,7,10,2,11,7,11,2,12,7,12,2,13,7,13,2,14,7,14,2,15,7,15,2,16,7,16,1,0, 1,0,1,0,1,1,1,1,1,2,1,2,1,2,5,2,43,8,2,10,2,12,2,46,9,2,1,3,1,3,1,3,1,3, @@ -1060,7 +1063,7 @@ export default class FilterQueryParser extends Parser { 10,9,12,9,189,9,9,1,10,1,10,1,11,1,11,1,11,1,11,1,11,1,12,1,12,1,12,5,12, 201,8,12,10,12,12,12,204,9,12,1,13,1,13,1,13,3,13,209,8,13,1,14,1,14,1, 14,1,14,1,15,1,15,1,16,1,16,1,16,0,0,17,0,2,4,6,8,10,12,14,16,18,20,22, - 24,26,28,30,32,0,5,1,0,7,8,1,0,13,14,2,0,28,28,31,31,1,0,23,25,1,0,26,29, + 24,26,28,30,32,0,5,1,0,7,8,1,0,13,14,2,0,29,29,32,32,1,0,23,26,1,0,27,30, 235,0,34,1,0,0,0,2,37,1,0,0,0,4,39,1,0,0,0,6,47,1,0,0,0,8,57,1,0,0,0,10, 70,1,0,0,0,12,149,1,0,0,0,14,163,1,0,0,0,16,180,1,0,0,0,18,182,1,0,0,0, 20,190,1,0,0,0,22,192,1,0,0,0,24,197,1,0,0,0,26,208,1,0,0,0,28,210,1,0, @@ -1112,7 +1115,7 @@ export default class FilterQueryParser extends Parser { 0,0,205,209,3,32,16,0,206,209,3,30,15,0,207,209,3,28,14,0,208,205,1,0,0, 0,208,206,1,0,0,0,208,207,1,0,0,0,209,27,1,0,0,0,210,211,5,3,0,0,211,212, 3,18,9,0,212,213,5,4,0,0,213,29,1,0,0,0,214,215,7,4,0,0,215,31,1,0,0,0, - 216,217,5,29,0,0,217,33,1,0,0,0,11,44,51,53,57,70,149,163,180,187,202,208]; + 216,217,5,30,0,0,217,33,1,0,0,0,11,44,51,53,57,70,149,163,180,187,202,208]; private static __ATN: ATN; public static get _ATN(): ATN { @@ -1647,6 +1650,9 @@ export class FunctionCallContext extends ParserRuleContext { public RPAREN(): TerminalNode { return this.getToken(FilterQueryParser.RPAREN, 0); } + public HASTOKEN(): TerminalNode { + return this.getToken(FilterQueryParser.HASTOKEN, 0); + } public HAS(): TerminalNode { return this.getToken(FilterQueryParser.HAS, 0); } diff --git a/frontend/src/parser/FilterQueryVisitor.ts b/frontend/src/parser/FilterQueryVisitor.ts index 098508ced356..9ffa568cf36e 100644 --- 
a/frontend/src/parser/FilterQueryVisitor.ts +++ b/frontend/src/parser/FilterQueryVisitor.ts @@ -1,4 +1,4 @@ -// Generated from FilterQuery.g4 by ANTLR 4.13.1 +// Generated from ../../../../grammar/FilterQuery.g4 by ANTLR 4.13.1 import {ParseTreeVisitor} from 'antlr4'; @@ -133,3 +133,4 @@ export default class FilterQueryVisitor extends ParseTreeVisitor */ visitKey?: (ctx: KeyContext) => Result; } + diff --git a/frontend/src/utils/tokenUtils.ts b/frontend/src/utils/tokenUtils.ts index 96fc830c13d0..98d164139dc2 100644 --- a/frontend/src/utils/tokenUtils.ts +++ b/frontend/src/utils/tokenUtils.ts @@ -74,6 +74,7 @@ export function isFunctionToken(tokenType: number): boolean { FilterQueryLexer.HAS, FilterQueryLexer.HASANY, FilterQueryLexer.HASALL, + FilterQueryLexer.HASTOKEN, ].includes(tokenType); } From 61acd946ccc3622a535e6b17dedcf51dc4e5034d Mon Sep 17 00:00:00 2001 From: Vibhu Pandey Date: Thu, 11 Sep 2025 23:29:55 +0530 Subject: [PATCH 16/51] fix: enable dot metrics by default (#9061) --- ee/query-service/constants/constants.go | 2 +- .../app/metrics/v3/cumulative_table_test.go | 6 ++-- .../app/metrics/v3/delta_table_test.go | 6 ++-- .../app/metrics/v3/query_builder_test.go | 10 +++--- .../app/metrics/v4/cumulative/table_test.go | 6 ++-- .../metrics/v4/cumulative/timeseries_test.go | 10 +++--- .../app/metrics/v4/delta/table_test.go | 6 ++-- .../app/metrics/v4/delta/time_series_test.go | 12 +++---- .../metrics/v4/query_builder_pre_agg_test.go | 20 +++++------ .../app/metrics/v4/query_builder_test.go | 36 +++++++++---------- .../app/queryBuilder/query_builder_test.go | 10 +++--- pkg/query-service/constants/constants.go | 2 +- 12 files changed, 63 insertions(+), 63 deletions(-) diff --git a/ee/query-service/constants/constants.go b/ee/query-service/constants/constants.go index 31653369c6fb..b7c6d11fc58b 100644 --- a/ee/query-service/constants/constants.go +++ b/ee/query-service/constants/constants.go @@ -40,7 +40,7 @@ var IsDotMetricsEnabled = false var 
IsPreferSpanMetrics = false func init() { - if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" { + if GetOrDefaultEnv(DotMetricsEnabled, "true") == "true" { IsDotMetricsEnabled = true } diff --git a/pkg/query-service/app/metrics/v3/cumulative_table_test.go b/pkg/query-service/app/metrics/v3/cumulative_table_test.go index 26b0e3c3eee4..ce4577c3da7d 100644 --- a/pkg/query-service/app/metrics/v3/cumulative_table_test.go +++ b/pkg/query-service/app/metrics/v3/cumulative_table_test.go @@ -38,7 +38,7 @@ func TestPanelTableForCumulative(t *testing.T) { }, Expression: "A", }, - expected: "SELECT toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_count'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch']) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_count'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts", + expected: "SELECT toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - 
lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_count'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch']) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_count'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts", }, { name: "latency p50", @@ -66,7 +66,7 @@ func TestPanelTableForCumulative(t *testing.T) { }, }, }, - expected: "SELECT toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4 WHERE 
metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY fingerprint, le,ts ORDER BY fingerprint, le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, le ORDER BY fingerprint, le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY le,ts ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts", + expected: "SELECT toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY fingerprint, le,ts ORDER BY fingerprint, le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, le ORDER BY fingerprint, le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY le,ts ORDER BY 
le ASC, ts) GROUP BY ts ORDER BY ts", }, { name: "latency p99 with group by", @@ -88,7 +88,7 @@ func TestPanelTableForCumulative(t *testing.T) { }, Expression: "A", }, - expected: "SELECT service_name, toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name,le, toStartOfHour(now()) as ts, sum(rate_value)/29 as value FROM (SELECT service_name,le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, service_name,le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY fingerprint, service_name,le,ts ORDER BY fingerprint, service_name ASC,le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, service_name,le ORDER BY fingerprint, service_name ASC,le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", + expected: "SELECT service_name, toStartOfHour(now()) as ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name,le, toStartOfHour(now()) as ts, 
sum(rate_value)/29 as value FROM (SELECT service_name,le, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, service_name,le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY fingerprint, service_name,le,ts ORDER BY fingerprint, service_name ASC,le ASC, ts) WINDOW rate_window as (PARTITION BY fingerprint, service_name,le ORDER BY fingerprint, service_name ASC,le ASC, ts)) WHERE isNaN(rate_value) = 0 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", }, } diff --git a/pkg/query-service/app/metrics/v3/delta_table_test.go b/pkg/query-service/app/metrics/v3/delta_table_test.go index 9da2437bcfff..684ae490408b 100644 --- a/pkg/query-service/app/metrics/v3/delta_table_test.go +++ b/pkg/query-service/app/metrics/v3/delta_table_test.go @@ -38,7 +38,7 @@ func TestPanelTableForDelta(t *testing.T) { }, Expression: "A", }, - expected: "SELECT toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_count'] AND 
temporality = 'Delta' AND __normalized = true AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch']) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_count'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY ts ORDER BY ts", + expected: "SELECT toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_count'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch']) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_count'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY ts ORDER BY ts", }, { name: "latency p50", @@ -61,7 +61,7 @@ func TestPanelTableForDelta(t *testing.T) { }, Expression: "A", }, - expected: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY le,ts ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts", + expected: "SELECT ts, histogramQuantile(arrayMap(x -> 
toFloat64(x), groupArray(le)), groupArray(value), 0.500) as value FROM (SELECT le, toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000 AND JSONExtractString(labels, 'service_name') = 'frontend') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY le,ts ORDER BY le ASC, ts) GROUP BY ts ORDER BY ts", }, { name: "latency p99 with group by", @@ -80,7 +80,7 @@ func TestPanelTableForDelta(t *testing.T) { }, Expression: "A", }, - expected: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name,le, toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", + expected: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name,le, toStartOfHour(now()) as ts, sum(value)/1800 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, 
fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1689253200000 AND unix_milli < 1689257640000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1689255866000 AND unix_milli <= 1689257640000 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", }, } diff --git a/pkg/query-service/app/metrics/v3/query_builder_test.go b/pkg/query-service/app/metrics/v3/query_builder_test.go index 20fe909ed31e..7bdbb01e18e2 100644 --- a/pkg/query-service/app/metrics/v3/query_builder_test.go +++ b/pkg/query-service/app/metrics/v3/query_builder_test.go @@ -58,7 +58,7 @@ func TestBuildQueryWithFilters(t *testing.T) { query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{}) require.NoError(t, err) - require.Contains(t, query, "WHERE metric_name IN ['name'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'a') != 'b'") + require.Contains(t, query, "WHERE metric_name IN ['name'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'a') != 'b'") require.Contains(t, query, rateWithoutNegative) require.Contains(t, query, "not match(JSONExtractString(labels, 'code'), 'ERROR_*')") }) @@ -97,7 +97,7 @@ func TestBuildQueryWithMultipleQueries(t *testing.T) { query, err := PrepareMetricQuery(q.Start, q.End, q.CompositeQuery.QueryType, q.CompositeQuery.PanelType, q.CompositeQuery.BuilderQueries["A"], Options{}) require.NoError(t, err) - require.Contains(t, query, "WHERE metric_name IN ['name'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 
AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'in') IN ['a','b','c']") + require.Contains(t, query, "WHERE metric_name IN ['name'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'in') IN ['a','b','c']") require.Contains(t, query, rateWithoutNegative) }) } @@ -105,7 +105,7 @@ func TestBuildQueryWithMultipleQueries(t *testing.T) { func TestBuildQueryXRate(t *testing.T) { t.Run("TestBuildQueryXRate", func(t *testing.T) { - tmpl := `SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['name'] AND temporality = '' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['name'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts` + tmpl := `SELECT ts, %s(rate_value) as value FROM (SELECT ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as rate_value FROM(SELECT fingerprint, 
toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['name'] AND temporality = '' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['name'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts) ) WHERE isNaN(rate_value) = 0 GROUP BY ts ORDER BY ts` cases := []struct { aggregateOperator v3.AggregateOperator @@ -403,7 +403,7 @@ func TestBuildQueryWithDotInMetricAndAttributes(t *testing.T) { }, }, }, - expected: "SELECT *, now() AS ts FROM (SELECT avgIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = '' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000 AND JSONExtractString(labels, 'os.type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735036080000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` asc, ts) )", + expected: "SELECT *, now() AS ts FROM (SELECT avgIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as value FROM 
signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = '' AND __normalized = false AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000 AND JSONExtractString(labels, 'os.type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735036080000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` asc, ts) )", }, { name: "TestBuildQueryWithDotInMetricAndAttributes with dot in metric and attributes with rate_avg aggregation", @@ -474,7 +474,7 @@ func TestBuildQueryWithDotInMetricAndAttributes(t *testing.T) { }, }, }, - expected: "SELECT *, now() AS ts FROM (SELECT avgIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp FROM (SELECT `os.type`, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as value FROM(SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = '' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000 AND JSONExtractString(labels, 'os.type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735036020000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` asc, ts) WINDOW rate_window as (PARTITION BY `os.type` ORDER BY 
`os.type`, ts) ) )", + expected: "SELECT *, now() AS ts FROM (SELECT avgIf(value, toUnixTimestamp(ts) != 0) as value, anyIf(ts, toUnixTimestamp(ts) != 0) AS timestamp FROM (SELECT `os.type`, ts, If((value - lagInFrame(value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as value FROM(SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = '' AND __normalized = false AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000 AND JSONExtractString(labels, 'os.type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735036020000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` asc, ts) WINDOW rate_window as (PARTITION BY `os.type` ORDER BY `os.type`, ts) ) )", }, } for _, testCase := range cases { diff --git a/pkg/query-service/app/metrics/v4/cumulative/table_test.go b/pkg/query-service/app/metrics/v4/cumulative/table_test.go index 8372751dbd67..b6bf12e8bdb9 100644 --- a/pkg/query-service/app/metrics/v4/cumulative/table_test.go +++ b/pkg/query-service/app/metrics/v4/cumulative/table_test.go @@ -51,7 +51,7 @@ func TestPrepareTableQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM 
signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Unspecified' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative", @@ -93,7 +93,7 @@ func TestPrepareTableQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as 
per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), 
'%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test time aggregation = avg, space aggregation = avg, temporality = unspecified, testing metrics and attribute name with dot", @@ -148,7 +148,7 @@ func TestPrepareTableQuery(t *testing.T) { }, start: 1735295140000, end: 1735554340000, - expectedQueryContains: "SELECT state, ts, avg(per_series_value) as value FROM (SELECT fingerprint, any(state) as state, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum) / sum(count) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'state') as state, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1735257600000 AND unix_milli < 1735554340000 AND JSONExtractString(labels, 'host.name') = 'signoz-host') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735295140000 AND unix_milli < 1735554340000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY state, ts ORDER BY state desc, ts ASC", + expectedQueryContains: "SELECT state, ts, avg(per_series_value) as value FROM (SELECT fingerprint, any(state) as state, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum) / sum(count) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'state') as state, fingerprint FROM 
signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = 'Unspecified' AND __normalized = false AND unix_milli >= 1735257600000 AND unix_milli < 1735554340000 AND JSONExtractString(labels, 'host.name') = 'signoz-host') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735295140000 AND unix_milli < 1735554340000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY state, ts ORDER BY state desc, ts ASC", }, } diff --git a/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go b/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go index c1e514fcc97a..41fd46d08cf3 100644 --- a/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go +++ b/pkg/query-service/app/metrics/v4/cumulative/timeseries_test.go @@ -66,7 +66,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", + expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, 
toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", }, { name: "test time aggregation = rate, temporality = cumulative", @@ -107,7 +107,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series 
USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)", + expectedQueryContains: "SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)", }, } @@ -168,7 +168,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN 
['system_memory_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Unspecified' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative", @@ -210,7 +210,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, 
any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series 
USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test time aggregation = avg, space aggregation = avg, temporality = unspecified, testing metrics and attribute name with dot", @@ -265,7 +265,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1735295140000, end: 1735554340000, - expectedQueryContains: "SELECT state, ts, avg(per_series_value) as value FROM (SELECT fingerprint, any(state) as state, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum) / sum(count) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'state') as state, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1735257600000 AND unix_milli < 1735554340000 AND JSONExtractString(labels, 'host.name') = 'signoz-host') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735295140000 AND unix_milli < 1735554340000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY state, ts ORDER BY state desc, ts ASC", + expectedQueryContains: "SELECT state, ts, avg(per_series_value) as value FROM (SELECT fingerprint, any(state) as state, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum) / sum(count) as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'state') as state, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN 
['system.memory.usage'] AND temporality = 'Unspecified' AND __normalized = false AND unix_milli >= 1735257600000 AND unix_milli < 1735554340000 AND JSONExtractString(labels, 'host.name') = 'signoz-host') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735295140000 AND unix_milli < 1735554340000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY state, ts ORDER BY state desc, ts ASC", }, } diff --git a/pkg/query-service/app/metrics/v4/delta/table_test.go b/pkg/query-service/app/metrics/v4/delta/table_test.go index e18e5dbbde34..b7c3d392bdf1 100644 --- a/pkg/query-service/app/metrics/v4/delta/table_test.go +++ b/pkg/query-service/app/metrics/v4/delta/table_test.go @@ -53,7 +53,7 @@ func TestPrepareTableQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint 
FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Unspecified' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = delta", @@ -95,7 +95,7 @@ func TestPrepareTableQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 
1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test time aggregation = rate, space aggregation = avg, temporality = delta, testing metrics and attribute name with dot", @@ -143,7 +143,7 @@ func TestPrepareTableQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT ts, avg(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz.latency.sum'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'host.name') = '4f6ec470feea') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz.latency.sum'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, avg(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz.latency.sum'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'host.name') = '4f6ec470feea') as filtered_time_series USING fingerprint WHERE 
metric_name IN ['signoz.latency.sum'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, } diff --git a/pkg/query-service/app/metrics/v4/delta/time_series_test.go b/pkg/query-service/app/metrics/v4/delta/time_series_test.go index 255fa26a854c..0a8a6cdd1d35 100644 --- a/pkg/query-service/app/metrics/v4/delta/time_series_test.go +++ b/pkg/query-service/app/metrics/v4/delta/time_series_test.go @@ -66,7 +66,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", + expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Delta' 
AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", }, { name: "test time aggregation = rate, temporality = delta", @@ -107,7 +107,7 @@ func TestPrepareTimeAggregationSubQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", + expectedQueryContains: "SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND 
like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts", }, } @@ -168,7 +168,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Unspecified' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND JSONExtractString(labels, 'state') != 'idle') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY 
fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = delta", @@ -210,7 +210,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000 AND like(JSONExtractString(labels, 'service_name'), '%payment_service%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['http_requests'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test time aggregation = rate, space aggregation percentile99, type = ExponentialHistogram", @@ -244,7 +244,7 @@ func 
TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1701794980000, end: 1701796780000, - expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, quantilesDDMerge(0.01, 0.990000)(sketch)[1] as value FROM signoz_metrics.distributed_exp_hist INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, quantilesDDMerge(0.01, 0.990000)(sketch)[1] as value FROM signoz_metrics.distributed_exp_hist INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['signoz_latency'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1701792000000 AND unix_milli < 1701796780000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency'] AND unix_milli >= 1701794980000 AND unix_milli < 1701796780000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test time aggregation = rate, space aggregation = max, temporality = delta, testing metrics and attribute name with dot", @@ -299,7 +299,7 @@ func TestPrepareTimeseriesQuery(t *testing.T) { }, start: 1735036101000, end: 1735637901000, - expectedQueryContains: "SELECT `host.name`, ts, max(per_series_value) as value FROM (SELECT fingerprint, any(`host.name`) as `host.name`, 
toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum)/60 as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host.name') as `host.name`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz.latency.sum'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637901000 AND JSONExtractString(labels, 'host_name') = '4f6ec470feea') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz.latency.sum'] AND unix_milli >= 1735036101000 AND unix_milli < 1735637901000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY `host.name`, ts ORDER BY `host.name` ASC, ts ASC", + expectedQueryContains: "SELECT `host.name`, ts, max(per_series_value) as value FROM (SELECT fingerprint, any(`host.name`) as `host.name`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(sum)/60 as per_series_value FROM signoz_metrics.distributed_samples_v4_agg_5m INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host.name') as `host.name`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz.latency.sum'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1734998400000 AND unix_milli < 1735637901000 AND JSONExtractString(labels, 'host_name') = '4f6ec470feea') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz.latency.sum'] AND unix_milli >= 1735036101000 AND unix_milli < 1735637901000 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY `host.name`, ts ORDER BY `host.name` ASC, ts ASC", }, } diff --git a/pkg/query-service/app/metrics/v4/query_builder_pre_agg_test.go b/pkg/query-service/app/metrics/v4/query_builder_pre_agg_test.go index bf6d81bc48ac..d1c314dfc0b1 100644 --- 
a/pkg/query-service/app/metrics/v4/query_builder_pre_agg_test.go +++ b/pkg/query-service/app/metrics/v4/query_builder_pre_agg_test.go @@ -49,7 +49,7 @@ func TestPrepareMetricQueryCumulativeRatePreAgg(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 
86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative, multiple group by", @@ -82,7 +82,7 @@ func TestPrepareMetricQueryCumulativeRatePreAgg(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, 
toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, endpoint, ts ORDER BY service_name ASC, endpoint ASC, ts ASC", + expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as 
filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, endpoint, ts ORDER BY service_name ASC, endpoint ASC, ts ASC", }, } @@ -123,7 +123,7 @@ func TestPrepareMetricQueryDeltaRatePreAgg(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = delta, group by service_name", @@ -149,7 +149,7 @@ func 
TestPrepareMetricQueryDeltaRatePreAgg(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, } @@ -204,7 +204,7 @@ func TestPrepreMetricQueryCumulativeQuantilePreAgg(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT 
service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, 
toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test temporality = cumulative, quantile = 0.99 without group by", @@ -234,7 +234,7 @@ func TestPrepreMetricQueryCumulativeQuantilePreAgg(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, 
toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), 
'%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", }, } @@ -289,7 +289,7 @@ func TestPrepreMetricQueryDeltaQuantilePreAgg(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT 
JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test temporality = delta, quantile = 0.99 no group by", @@ -319,7 +319,7 @@ func TestPrepreMetricQueryDeltaQuantilePreAgg(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, 
toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", }, } @@ -360,7 +360,7 @@ func TestPrepareMetricQueryGaugePreAgg(t *testing.T) { SpaceAggregation: v3.SpaceAggregationSum, Disabled: false, }, - expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day 
WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test gauge query with group by host_name", @@ -386,7 +386,7 @@ func TestPrepareMetricQueryGaugePreAgg(t *testing.T) { Expression: "A", Disabled: false, }, - expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY host_name, ts ORDER BY host_name ASC, ts ASC", + expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 
'Unspecified' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY host_name, ts ORDER BY host_name ASC, ts ASC", }, } diff --git a/pkg/query-service/app/metrics/v4/query_builder_test.go b/pkg/query-service/app/metrics/v4/query_builder_test.go index 94b06d66ac20..6af48a102c80 100644 --- a/pkg/query-service/app/metrics/v4/query_builder_test.go +++ b/pkg/query-service/app/metrics/v4/query_builder_test.go @@ -33,7 +33,7 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) { Disabled: false, // remaining struct fields are not needed here }, - expectedQueryContains: "SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000", + expectedQueryContains: "SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000", }, { name: "test prepare time series with no filters and group by", @@ -58,7 +58,7 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) { Disabled: false, // remaining struct fields are not needed here }, - expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000", + expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 
WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000", }, { name: "test prepare time series with no filters and multiple group by", @@ -90,7 +90,7 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) { Disabled: false, // remaining struct fields are not needed here }, - expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000", + expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000", }, { name: "test prepare time series with filters and multiple group by", @@ -138,7 +138,7 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) { Disabled: false, // remaining struct fields are not needed here }, - expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000 AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']", + expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = 
false AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000 AND JSONExtractString(labels, 'service_name') != 'payment_service' AND JSONExtractString(labels, 'endpoint') IN ['/paycallback','/payme','/paypal']", }, { name: "test prepare time series with filters and multiple group by", @@ -186,7 +186,7 @@ func TestPrepareTimeseriesFilterQuery(t *testing.T) { Disabled: false, // remaining struct fields are not needed here }, - expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000 AND ilike(JSONExtractString(labels, 'service_name'), 'payment_service') AND notILike(JSONExtractString(labels, 'endpoint'), 'payment_service')", + expectedQueryContains: "SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['http_requests'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1706428800000 AND unix_milli < 1706434026000 AND ilike(JSONExtractString(labels, 'service_name'), 'payment_service') AND notILike(JSONExtractString(labels, 'endpoint'), 'payment_service')", }, } @@ -242,7 +242,7 @@ func TestPrepareMetricQueryCumulativeRate(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as 
service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, sum(per_series_value) as value FROM (SELECT service_name, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING 
fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = cumulative, multiple group by", @@ -275,7 +275,7 @@ func TestPrepareMetricQueryCumulativeRate(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, 
ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, endpoint, ts ORDER BY service_name ASC, endpoint ASC, ts ASC", + expectedQueryContains: "SELECT service_name, endpoint, ts, sum(per_series_value) as value FROM (SELECT service_name, endpoint, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(endpoint) as endpoint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'endpoint') as endpoint, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991920000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, endpoint, ts ORDER BY service_name ASC, endpoint ASC, ts ASC", }, } @@ -316,7 +316,7 @@ func TestPrepareMetricQueryDeltaRate(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM 
signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test time aggregation = rate, space aggregation = sum, temporality = delta, group by service_name", @@ -342,7 +342,7 @@ func TestPrepareMetricQueryDeltaRate(t *testing.T) { TimeAggregation: v3.TimeAggregationRate, SpaceAggregation: v3.SpaceAggregationSum, }, - expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY 
service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_calls_total'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_calls_total'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, } @@ -397,7 +397,7 @@ func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized 
= true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, ts, sum(per_series_value) as value FROM (SELECT service_name, le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(service_name) as service_name, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND 
bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test temporality = cumulative, quantile = 0.99 without group by", @@ -427,7 +427,7 @@ func TestPrepreMetricQueryCumulativeQuantile(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY le, ts ORDER BY le 
ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, ts, sum(per_series_value) as value FROM (SELECT le, ts, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, nan, If((ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window) >= 86400, nan, (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window))) as per_series_value FROM (SELECT fingerprint, any(le) as le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WINDOW rate_window as (PARTITION BY fingerprint ORDER BY fingerprint, ts)) WHERE isNaN(per_series_value) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", }, } @@ -482,7 +482,7 @@ func TestPrepreMetricQueryDeltaQuantile(t *testing.T) { Disabled: false, SpaceAggregation: v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM 
signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", + expectedQueryContains: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT service_name, le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY service_name, le, ts ORDER BY service_name ASC, le ASC, ts ASC) GROUP BY service_name, ts ORDER BY service_name ASC, ts ASC", }, { name: "test temporality = delta, quantile = 0.99 no group by", @@ -512,7 +512,7 @@ func TestPrepreMetricQueryDeltaQuantile(t *testing.T) { Disabled: false, SpaceAggregation: 
v3.SpaceAggregationPercentile99, }, - expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.990) as value FROM (SELECT le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND like(JSONExtractString(labels, 'service_name'), '%frontend%')) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY le, ts ORDER BY le ASC, ts ASC) GROUP BY ts ORDER BY ts ASC", }, } @@ -553,7 +553,7 @@ func TestPrepareMetricQueryGauge(t *testing.T) { SpaceAggregation: v3.SpaceAggregationSum, Disabled: false, }, - expectedQueryContains: "SELECT ts, 
sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", + expectedQueryContains: "SELECT ts, sum(per_series_value) as value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY ts ORDER BY ts ASC", }, { name: "test value filter with string value", @@ -610,7 +610,7 @@ func TestPrepareMetricQueryGauge(t *testing.T) { Expression: "A", Disabled: false, }, - expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT 
JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY host_name, ts ORDER BY host_name ASC, ts ASC", + expectedQueryContains: "SELECT host_name, ts, sum(per_series_value) as value FROM (SELECT fingerprint, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system_cpu_usage'] AND temporality = 'Unspecified' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system_cpu_usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY host_name, ts ORDER BY host_name ASC, ts ASC", }, { name: "test gauge query with multiple group by with metric and attribute name containing dot", @@ -679,7 +679,7 @@ func TestPrepareMetricQueryGauge(t *testing.T) { ReduceTo: v3.ReduceToOperatorAvg, Having: []v3.Having{}, }, - expectedQueryContains: "SELECT `os.type`, state, `host.name`, ts, max(per_series_value) as value FROM (SELECT fingerprint, any(`os.type`) as `os.type`, any(state) as state, any(`host.name`) as `host.name`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, 
max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, JSONExtractString(labels, 'state') as state, JSONExtractString(labels, 'host.name') as `host.name`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = 'Unspecified' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'host.name') = 'signoz-host') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY `os.type`, state, `host.name`, ts ORDER BY `os.type` desc, state asc, `host.name` ASC, ts ASC", + expectedQueryContains: "SELECT `os.type`, state, `host.name`, ts, max(per_series_value) as value FROM (SELECT fingerprint, any(`os.type`) as `os.type`, any(state) as state, any(`host.name`) as `host.name`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, max(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, JSONExtractString(labels, 'state') as state, JSONExtractString(labels, 'host.name') as `host.name`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = 'Unspecified' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'host.name') = 'signoz-host') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1650991980000 AND unix_milli < 1651078380000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY `os.type`, 
state, `host.name`, ts ORDER BY `os.type` desc, state asc, `host.name` ASC, ts ASC", }, } @@ -759,7 +759,7 @@ func TestPrepareMetricQueryValueTypePanelWithGroupBY(t *testing.T) { }, }, }, - expectedQueryContains: "SELECT max(value) as aggregated_value, ts FROM (SELECT state, ts, avg(per_series_value) as value FROM (SELECT fingerprint, any(state) as state, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, anyLast(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'state') as state, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1735891200000 AND unix_milli < 1735894800000 AND JSONExtractString(labels, 'os_type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1735891800000 AND unix_milli < 1735894800000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY state, ts ORDER BY state desc, ts ASC) GROUP BY ts ORDER BY ts", + expectedQueryContains: "SELECT max(value) as aggregated_value, ts FROM (SELECT state, ts, avg(per_series_value) as value FROM (SELECT fingerprint, any(state) as state, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, anyLast(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'state') as state, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1735891200000 AND unix_milli < 1735894800000 AND JSONExtractString(labels, 'os_type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1735891800000 AND unix_milli < 1735894800000 AND 
bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY state, ts ORDER BY state desc, ts ASC) GROUP BY ts ORDER BY ts", }, { name: "test temporality = cumulative, panel = value, series agg = max group by state, host_name", @@ -825,7 +825,7 @@ func TestPrepareMetricQueryValueTypePanelWithGroupBY(t *testing.T) { }, }, }, - expectedQueryContains: "SELECT max(value) as aggregated_value, ts FROM (SELECT state, host_name, ts, avg(per_series_value) as value FROM (SELECT fingerprint, any(state) as state, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, anyLast(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'state') as state, JSONExtractString(labels, 'host_name') as host_name, fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1735891200000 AND unix_milli < 1735894800000 AND JSONExtractString(labels, 'os_type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1735891800000 AND unix_milli < 1735894800000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY state, host_name, ts ORDER BY state desc, host_name ASC, ts ASC) GROUP BY ts ORDER BY ts", + expectedQueryContains: "SELECT max(value) as aggregated_value, ts FROM (SELECT state, host_name, ts, avg(per_series_value) as value FROM (SELECT fingerprint, any(state) as state, any(host_name) as host_name, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, anyLast(value) as per_series_value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'state') as state, JSONExtractString(labels, 'host_name') as host_name, 
fingerprint FROM signoz_metrics.time_series_v4 WHERE metric_name IN ['system_memory_usage'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1735891200000 AND unix_milli < 1735894800000 AND JSONExtractString(labels, 'os_type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system_memory_usage'] AND unix_milli >= 1735891800000 AND unix_milli < 1735894800000 AND bitAnd(flags, 1) = 0 GROUP BY fingerprint, ts ORDER BY fingerprint, ts) WHERE isNaN(per_series_value) = 0 GROUP BY state, host_name, ts ORDER BY state desc, host_name ASC, ts ASC) GROUP BY ts ORDER BY ts", }, } diff --git a/pkg/query-service/app/queryBuilder/query_builder_test.go b/pkg/query-service/app/queryBuilder/query_builder_test.go index e089b6c555fa..3a8218095977 100644 --- a/pkg/query-service/app/queryBuilder/query_builder_test.go +++ b/pkg/query-service/app/queryBuilder/query_builder_test.go @@ -57,7 +57,7 @@ func TestBuildQueryWithMultipleQueriesAndFormula(t *testing.T) { require.NoError(t, err) require.Contains(t, queries["C"], "SELECT A.`ts` as `ts`, A.value / B.value") - require.Contains(t, queries["C"], "WHERE metric_name IN ['name'] AND temporality = 'Cumulative' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'in') IN ['a','b','c']") + require.Contains(t, queries["C"], "WHERE metric_name IN ['name'] AND temporality = 'Cumulative' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'in') IN ['a','b','c']") require.Contains(t, queries["C"], "(value - lagInFrame(value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDate('1970-01-01')) OVER rate_window)))") }) } @@ -337,7 +337,7 @@ func TestBuildQueryWithThreeOrMoreQueriesRefAndFormula(t *testing.T) { qb := NewQueryBuilder(qbOptions) queries, err := qb.PrepareQueries(q) - require.Contains(t, queries["F1"], "SELECT A.`os.type` as `os.type`, A.`ts` as `ts`, 
A.value + B.value as value FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = '' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000 AND JSONExtractString(labels, 'os.type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735036080000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` ASC, ts) as A INNER JOIN (SELECT * FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.network.io'] AND temporality = '' AND __normalized = true AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system.network.io'] AND unix_milli >= 1735036020000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` ASC, ts) HAVING value > 4) as B ON A.`os.type` = B.`os.type` AND A.`ts` = B.`ts`") + require.Contains(t, queries["F1"], "SELECT A.`os.type` as `os.type`, A.`ts` as `ts`, A.value + B.value as value FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, avg(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.memory.usage'] AND temporality = '' AND __normalized = false AND 
unix_milli >= 1734998400000 AND unix_milli < 1735637880000 AND JSONExtractString(labels, 'os.type') = 'linux') as filtered_time_series USING fingerprint WHERE metric_name IN ['system.memory.usage'] AND unix_milli >= 1735036080000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` ASC, ts) as A INNER JOIN (SELECT * FROM (SELECT `os.type`, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value) as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'os.type') as `os.type`, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['system.network.io'] AND temporality = '' AND __normalized = false AND unix_milli >= 1734998400000 AND unix_milli < 1735637880000) as filtered_time_series USING fingerprint WHERE metric_name IN ['system.network.io'] AND unix_milli >= 1735036020000 AND unix_milli < 1735637880000 GROUP BY `os.type`, ts ORDER BY `os.type` ASC, ts) HAVING value > 4) as B ON A.`os.type` = B.`os.type` AND A.`ts` = B.`ts`") require.NoError(t, err) }) @@ -386,7 +386,7 @@ func TestDeltaQueryBuilder(t *testing.T) { }, }, queryToTest: "A", - expected: "SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_count'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_count'] AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY ts ORDER BY ts", + expected: "SELECT 
toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_count'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_count'] AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY ts ORDER BY ts", }, { name: "TestQueryWithExpression - Error rate", @@ -456,7 +456,7 @@ func TestDeltaQueryBuilder(t *testing.T) { }, }, queryToTest: "C", - expected: "SELECT A.`ts` as `ts`, A.value * 100 / B.value as value FROM (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_count'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, 'status_code') IN ['STATUS_CODE_ERROR'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_count'] AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY ts ORDER BY ts) as A INNER JOIN (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT 
DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_count'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_count'] AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY ts ORDER BY ts) as B ON A.`ts` = B.`ts`", + expected: "SELECT A.`ts` as `ts`, A.value * 100 / B.value as value FROM (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_count'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, 'status_code') IN ['STATUS_CODE_ERROR'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_count'] AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY ts ORDER BY ts) as A INNER JOIN (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_count'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000 AND JSONExtractString(labels, 'service_name') IN ['frontend'] 
AND JSONExtractString(labels, 'operation') IN ['HTTP GET /dispatch'] AND JSONExtractString(labels, '__temporality__') = 'Delta') as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_count'] AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY ts ORDER BY ts) as B ON A.`ts` = B.`ts`", }, { name: "TestQuery - Quantile", @@ -484,7 +484,7 @@ func TestDeltaQueryBuilder(t *testing.T) { }, }, queryToTest: "A", - expected: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) as value FROM (SELECT service_name,le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = true AND unix_milli >= 1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", + expected: "SELECT service_name, ts, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) as value FROM (SELECT service_name,le, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), INTERVAL 60 SECOND) as ts, sum(value)/60 as value FROM signoz_metrics.distributed_samples_v4 INNER JOIN (SELECT DISTINCT JSONExtractString(labels, 'service_name') as service_name, JSONExtractString(labels, 'le') as le, fingerprint FROM signoz_metrics.time_series_v4_1day WHERE metric_name IN ['signoz_latency_bucket'] AND temporality = 'Delta' AND __normalized = false AND unix_milli >= 
1650931200000 AND unix_milli < 1651078380000) as filtered_time_series USING fingerprint WHERE metric_name IN ['signoz_latency_bucket'] AND unix_milli >= 1650991980000 AND unix_milli <= 1651078380000 GROUP BY service_name,le,ts ORDER BY service_name ASC,le ASC, ts) GROUP BY service_name,ts ORDER BY service_name ASC, ts", }, } diff --git a/pkg/query-service/constants/constants.go b/pkg/query-service/constants/constants.go index 5bb72805ac3e..2bbfb2e823f7 100644 --- a/pkg/query-service/constants/constants.go +++ b/pkg/query-service/constants/constants.go @@ -696,7 +696,7 @@ var MaxJSONFlatteningDepth = 1 func init() { StaticFieldsTraces = maps.Clone(NewStaticFieldsTraces) maps.Copy(StaticFieldsTraces, DeprecatedStaticFieldsTraces) - if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" { + if GetOrDefaultEnv(DotMetricsEnabled, "true") == "true" { IsDotMetricsEnabled = true } if GetOrDefaultEnv("USE_SPAN_METRICS", "false") == "true" { From d801fcee7675c9f2aed90def9eac048b4f9bb499 Mon Sep 17 00:00:00 2001 From: aniketio-ctrl Date: Fri, 12 Sep 2025 13:11:54 +0530 Subject: [PATCH 17/51] chore: add multiple thresholds support (#8816) --- ee/query-service/rules/anomaly.go | 29 +- pkg/contextlinks/alert_link_visitor.go | 5 + pkg/contextlinks/links.go | 5 + pkg/contextlinks/types.go | 7 +- pkg/query-service/rules/base_rule.go | 7 + pkg/query-service/rules/manager.go | 85 ++-- pkg/query-service/rules/prom_rule.go | 149 +++---- pkg/query-service/rules/promrule_test.go | 135 ++++-- pkg/query-service/rules/threshold_rule.go | 31 +- .../rules/threshold_rule_test.go | 388 +++++++++++++++- pkg/types/ruletypes/alerting.go | 21 +- pkg/types/ruletypes/api_params.go | 166 +++---- pkg/types/ruletypes/api_params_test.go | 307 +++++++++++++ pkg/types/ruletypes/constants.go | 4 + pkg/types/ruletypes/threshold.go | 419 ++++++++++++++++++ 15 files changed, 1436 insertions(+), 322 deletions(-) create mode 100644 pkg/types/ruletypes/constants.go create mode 100644 
pkg/types/ruletypes/threshold.go diff --git a/ee/query-service/rules/anomaly.go b/ee/query-service/rules/anomaly.go index 3fbf1e32b1f2..ff0aa40be8d8 100644 --- a/ee/query-service/rules/anomaly.go +++ b/ee/query-service/rules/anomaly.go @@ -35,7 +35,6 @@ import ( anomalyV2 "github.com/SigNoz/signoz/ee/anomaly" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" - yaml "gopkg.in/yaml.v2" ) const ( @@ -253,10 +252,17 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON)) for _, series := range queryResult.AnomalyScores { - smpl, shouldAlert := r.ShouldAlert(*series) - if shouldAlert { - resultVector = append(resultVector, smpl) + if r.Condition() != nil && r.Condition().RequireMinPoints { + if len(series.Points) < r.Condition().RequiredNumPoints { + r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints) + continue + } } + results, err := r.Threshold.ShouldAlert(*series) + if err != nil { + return nil, err + } + resultVector = append(resultVector, results...) 
} return resultVector, nil } @@ -296,10 +302,17 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON)) for _, series := range queryResult.AnomalyScores { - smpl, shouldAlert := r.ShouldAlert(*series) - if shouldAlert { - resultVector = append(resultVector, smpl) + if r.Condition().RequireMinPoints { + if len(series.Points) < r.Condition().RequiredNumPoints { + r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints) + continue + } } + results, err := r.Threshold.ShouldAlert(*series) + if err != nil { + return nil, err + } + resultVector = append(resultVector, results...) } return resultVector, nil } @@ -499,7 +512,7 @@ func (r *AnomalyRule) String() string { PreferredChannels: r.PreferredChannels(), } - byt, err := yaml.Marshal(ar) + byt, err := json.Marshal(ar) if err != nil { return fmt.Sprintf("error marshaling alerting rule: %s", err.Error()) } diff --git a/pkg/contextlinks/alert_link_visitor.go b/pkg/contextlinks/alert_link_visitor.go index b1166ca0519d..09714553e9e7 100644 --- a/pkg/contextlinks/alert_link_visitor.go +++ b/pkg/contextlinks/alert_link_visitor.go @@ -46,6 +46,11 @@ func PrepareFilterExpression(labels map[string]string, whereClause string, group return "" } + //delete predefined alert labels + for _, label := range PredefinedAlertLabels { + delete(labels, label) + } + groupBySet := make(map[string]struct{}) for _, item := range groupByItems { groupBySet[item.Name] = struct{}{} diff --git a/pkg/contextlinks/links.go b/pkg/contextlinks/links.go index 8412b4757c00..8497cb55de9b 100644 --- a/pkg/contextlinks/links.go +++ b/pkg/contextlinks/links.go @@ -150,6 +150,11 @@ func PrepareLinksToLogs(start, end time.Time, filterItems []v3.FilterItem) strin func PrepareFilters(labels map[string]string, whereClauseItems 
[]v3.FilterItem, groupByItems []v3.AttributeKey, keys map[string]v3.AttributeKey) []v3.FilterItem { filterItems := make([]v3.FilterItem, 0) + //delete predefined alert labels + for _, label := range PredefinedAlertLabels { + delete(labels, label) + } + added := make(map[string]struct{}) for _, item := range whereClauseItems { diff --git a/pkg/contextlinks/types.go b/pkg/contextlinks/types.go index bfc1ab6abbe0..9edf92a5e541 100644 --- a/pkg/contextlinks/types.go +++ b/pkg/contextlinks/types.go @@ -1,6 +1,9 @@ package contextlinks -import v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" +import ( + v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" + "github.com/SigNoz/signoz/pkg/types/ruletypes" +) // TODO(srikanthccv): Fix the URL management type URLShareableTimeRange struct { @@ -38,3 +41,5 @@ type URLShareableOptions struct { Format string `json:"format"` SelectColumns []v3.AttributeKey `json:"selectColumns"` } + +var PredefinedAlertLabels = []string{ruletypes.LabelThresholdName} diff --git a/pkg/query-service/rules/base_rule.go b/pkg/query-service/rules/base_rule.go index 40952e4a5059..be14b9133f9b 100644 --- a/pkg/query-service/rules/base_rule.go +++ b/pkg/query-service/rules/base_rule.go @@ -32,6 +32,8 @@ type BaseRule struct { typ ruletypes.AlertType ruleCondition *ruletypes.RuleCondition + + Threshold ruletypes.RuleThreshold // evalWindow is the time window used for evaluating the rule // i.e each time we lookback from the current time, we look at data for the last // evalWindow duration @@ -123,6 +125,10 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader if p.RuleCondition == nil || !p.RuleCondition.IsValid() { return nil, fmt.Errorf("invalid rule condition") } + threshold, err := p.RuleCondition.Thresholds.GetRuleThreshold() + if err != nil { + return nil, err + } baseRule := &BaseRule{ id: id, @@ -139,6 +145,7 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader Active: 
map[uint64]*ruletypes.Alert{}, reader: reader, TemporalityMap: make(map[string]map[v3.Temporality]bool), + Threshold: threshold, } if baseRule.evalWindow == 0 { diff --git a/pkg/query-service/rules/manager.go b/pkg/query-service/rules/manager.go index 341bac74291d..f80686682687 100644 --- a/pkg/query-service/rules/manager.go +++ b/pkg/query-service/rules/manager.go @@ -266,30 +266,17 @@ func (m *Manager) initiate(ctx context.Context) error { for _, rec := range storedRules { taskName := fmt.Sprintf("%s-groupname", rec.ID.StringValue()) - parsedRule, err := ruletypes.ParsePostableRule([]byte(rec.Data)) + parsedRule := ruletypes.PostableRule{} + err := json.Unmarshal([]byte(rec.Data), &parsedRule) if err != nil { - if errors.Is(err, ruletypes.ErrFailedToParseJSON) { - zap.L().Info("failed to load rule in json format, trying yaml now:", zap.String("name", taskName)) - - // see if rule is stored in yaml format - parsedRule, err = ruletypes.ParsePostableRuleWithKind([]byte(rec.Data), ruletypes.RuleDataKindYaml) - - if err != nil { - zap.L().Error("failed to parse and initialize yaml rule", zap.String("name", taskName), zap.Error(err)) - // just one rule is being parsed so expect just one error - loadErrors = append(loadErrors, err) - continue - } - } else { - zap.L().Error("failed to parse and initialize rule", zap.String("name", taskName), zap.Error(err)) - // just one rule is being parsed so expect just one error - loadErrors = append(loadErrors, err) - continue - } + zap.L().Info("failed to load rule in json format", zap.String("name", taskName)) + loadErrors = append(loadErrors, err) + continue } + if !parsedRule.Disabled { - err := m.addTask(ctx, org.ID, parsedRule, taskName) + err := m.addTask(ctx, org.ID, &parsedRule, taskName) if err != nil { zap.L().Error("failed to load the rule definition", zap.String("name", taskName), zap.Error(err)) } @@ -335,8 +322,8 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) if err != nil { return 
err } - - parsedRule, err := ruletypes.ParsePostableRule([]byte(ruleStr)) + parsedRule := ruletypes.PostableRule{} + err = json.Unmarshal([]byte(ruleStr), &parsedRule) if err != nil { return err } @@ -380,7 +367,7 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) return err } - err = m.syncRuleStateWithTask(ctx, orgID, prepareTaskName(existingRule.ID.StringValue()), parsedRule) + err = m.syncRuleStateWithTask(ctx, orgID, prepareTaskName(existingRule.ID.StringValue()), &parsedRule) if err != nil { return err } @@ -513,8 +500,8 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*ruletypes.Ge if err != nil { return nil, err } - - parsedRule, err := ruletypes.ParsePostableRule([]byte(ruleStr)) + parsedRule := ruletypes.PostableRule{} + err = json.Unmarshal([]byte(ruleStr), &parsedRule) if err != nil { return nil, err } @@ -567,7 +554,7 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*ruletypes.Ge } taskName := prepareTaskName(id.StringValue()) - if err := m.addTask(ctx, orgID, parsedRule, taskName); err != nil { + if err := m.addTask(ctx, orgID, &parsedRule, taskName); err != nil { return err } @@ -579,7 +566,7 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*ruletypes.Ge return &ruletypes.GettableRule{ Id: id.StringValue(), - PostableRule: *parsedRule, + PostableRule: parsedRule, }, nil } @@ -797,8 +784,9 @@ func (m *Manager) ListRuleStates(ctx context.Context) (*ruletypes.GettableRules, for _, s := range storedRules { - ruleResponse := &ruletypes.GettableRule{} - if err := json.Unmarshal([]byte(s.Data), ruleResponse); err != nil { // Parse []byte to go struct pointer + ruleResponse := ruletypes.GettableRule{} + err = json.Unmarshal([]byte(s.Data), &ruleResponse) + if err != nil { zap.L().Error("failed to unmarshal rule from db", zap.String("id", s.ID.StringValue()), zap.Error(err)) continue } @@ -816,7 +804,7 @@ func (m *Manager) ListRuleStates(ctx context.Context) 
(*ruletypes.GettableRules, ruleResponse.CreatedBy = &s.CreatedBy ruleResponse.UpdatedAt = &s.UpdatedAt ruleResponse.UpdatedBy = &s.UpdatedBy - resp = append(resp, ruleResponse) + resp = append(resp, &ruleResponse) } return &ruletypes.GettableRules{Rules: resp}, nil @@ -827,8 +815,10 @@ func (m *Manager) GetRule(ctx context.Context, id valuer.UUID) (*ruletypes.Getta if err != nil { return nil, err } - r := &ruletypes.GettableRule{} - if err := json.Unmarshal([]byte(s.Data), r); err != nil { + r := ruletypes.GettableRule{} + err = json.Unmarshal([]byte(s.Data), &r) + if err != nil { + zap.L().Error("failed to unmarshal rule from db", zap.String("id", s.ID.StringValue()), zap.Error(err)) return nil, err } r.Id = id.StringValue() @@ -844,7 +834,7 @@ func (m *Manager) GetRule(ctx context.Context, id valuer.UUID) (*ruletypes.Getta r.UpdatedAt = &s.UpdatedAt r.UpdatedBy = &s.UpdatedBy - return r, nil + return &r, nil } // syncRuleStateWithTask ensures that the state of a stored rule matches @@ -902,20 +892,14 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, id valuer.UUID) } // storedRule holds the current stored rule from DB - storedRule := ruletypes.PostableRule{} - if err := json.Unmarshal([]byte(storedJSON.Data), &storedRule); err != nil { + patchedRule := ruletypes.PostableRule{} + if err := json.Unmarshal([]byte(ruleStr), &patchedRule); err != nil { zap.L().Error("failed to unmarshal stored rule with given id", zap.String("id", id.StringValue()), zap.Error(err)) return nil, err } - // patchedRule is combo of stored rule and patch received in the request - patchedRule, err := ruletypes.ParseIntoRule(storedRule, []byte(ruleStr), "json") - if err != nil { - return nil, err - } - // deploy or un-deploy task according to patched (new) rule state - if err := m.syncRuleStateWithTask(ctx, orgID, taskName, patchedRule); err != nil { + if err := m.syncRuleStateWithTask(ctx, orgID, taskName, &patchedRule); err != nil { zap.L().Error("failed to sync stored 
rule state with the task", zap.String("taskName", taskName), zap.Error(err)) return nil, err } @@ -933,7 +917,7 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, id valuer.UUID) err = m.ruleStore.EditRule(ctx, storedJSON, func(ctx context.Context) error { return nil }) if err != nil { - if err := m.syncRuleStateWithTask(ctx, orgID, taskName, &storedRule); err != nil { + if err := m.syncRuleStateWithTask(ctx, orgID, taskName, &patchedRule); err != nil { zap.L().Error("failed to restore rule after patch failure", zap.String("taskName", taskName), zap.Error(err)) } return nil, err @@ -942,7 +926,7 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, id valuer.UUID) // prepare http response response := ruletypes.GettableRule{ Id: id.StringValue(), - PostableRule: *patchedRule, + PostableRule: patchedRule, } // fetch state of rule from memory @@ -959,15 +943,14 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, id valuer.UUID) // TestNotification prepares a dummy rule for given rule parameters and // sends a test notification. 
returns alert count and error (if any) func (m *Manager) TestNotification(ctx context.Context, orgID valuer.UUID, ruleStr string) (int, *model.ApiError) { - - parsedRule, err := ruletypes.ParsePostableRule([]byte(ruleStr)) - + parsedRule := ruletypes.PostableRule{} + err := json.Unmarshal([]byte(ruleStr), &parsedRule) if err != nil { return 0, model.BadRequest(err) } alertCount, apiErr := m.prepareTestRuleFunc(PrepareTestRuleOptions{ - Rule: parsedRule, + Rule: &parsedRule, RuleStore: m.ruleStore, MaintenanceStore: m.maintenanceStore, Logger: m.logger, @@ -1001,15 +984,15 @@ func (m *Manager) GetAlertDetailsForMetricNames(ctx context.Context, metricNames for _, storedRule := range rules { var rule ruletypes.GettableRule - if err := json.Unmarshal([]byte(storedRule.Data), &rule); err != nil { - zap.L().Error("Invalid rule data", zap.Error(err)) + err = json.Unmarshal([]byte(storedRule.Data), &rule) + if err != nil { + zap.L().Error("failed to unmarshal rule from db", zap.String("id", storedRule.ID.StringValue()), zap.Error(err)) continue } if rule.AlertType != ruletypes.AlertTypeMetric || rule.RuleCondition == nil || rule.RuleCondition.CompositeQuery == nil { continue } - rule.Id = storedRule.ID.StringValue() rule.CreatedAt = &storedRule.CreatedAt rule.CreatedBy = &storedRule.CreatedBy diff --git a/pkg/query-service/rules/prom_rule.go b/pkg/query-service/rules/prom_rule.go index 90c9d4619c8e..ea07f85e04b7 100644 --- a/pkg/query-service/rules/prom_rule.go +++ b/pkg/query-service/rules/prom_rule.go @@ -16,12 +16,10 @@ import ( qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels" "github.com/SigNoz/signoz/pkg/query-service/utils/times" "github.com/SigNoz/signoz/pkg/query-service/utils/timestamp" + qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes" "github.com/SigNoz/signoz/pkg/valuer" "github.com/prometheus/prometheus/promql" - yaml "gopkg.in/yaml.v2" - - qbtypes 
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" ) type PromRule struct { @@ -151,84 +149,87 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error) var alerts = make(map[uint64]*ruletypes.Alert, len(res)) for _, series := range res { - l := make(map[string]string, len(series.Metric)) - for _, lbl := range series.Metric { - l[lbl.Name] = lbl.Value - } if len(series.Floats) == 0 { continue } - alertSmpl, shouldAlert := r.ShouldAlert(toCommonSeries(series)) - if !shouldAlert { - continue - } - r.logger.DebugContext(ctx, "alerting for series", "rule_name", r.Name(), "series", series) - - threshold := valueFormatter.Format(r.targetVal(), r.Unit()) - - tmplData := ruletypes.AlertTemplateData(l, valueFormatter.Format(alertSmpl.V, r.Unit()), threshold) - // Inject some convenience variables that are easier to remember for users - // who are not used to Go's templating system. - defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}" - - expand := func(text string) string { - - tmpl := ruletypes.NewTemplateExpander( - ctx, - defs+text, - "__alert_"+r.Name(), - tmplData, - times.Time(timestamp.FromTime(ts)), - nil, - ) - result, err := tmpl.Expand() - if err != nil { - result = fmt.Sprintf("", err) - r.logger.WarnContext(ctx, "Expanding alert template failed", "rule_name", r.Name(), "error", err, "data", tmplData) - } - return result - } - - lb := qslabels.NewBuilder(alertSmpl.Metric).Del(qslabels.MetricNameLabel) - resultLabels := qslabels.NewBuilder(alertSmpl.Metric).Del(qslabels.MetricNameLabel).Labels() - - for name, value := range r.labels.Map() { - lb.Set(name, expand(value)) - } - - lb.Set(qslabels.AlertNameLabel, r.Name()) - lb.Set(qslabels.AlertRuleIdLabel, r.ID()) - lb.Set(qslabels.RuleSourceLabel, r.GeneratorURL()) - - annotations := make(qslabels.Labels, 0, len(r.annotations.Map())) - for name, value := range r.annotations.Map() { - annotations = append(annotations, 
qslabels.Label{Name: name, Value: expand(value)}) - } - - lbs := lb.Labels() - h := lbs.Hash() - resultFPs[h] = struct{}{} - - if _, ok := alerts[h]; ok { - err = fmt.Errorf("vector contains metrics with the same labelset after applying alert labels") - // We have already acquired the lock above hence using SetHealth and - // SetLastError will deadlock. - r.health = ruletypes.HealthBad - r.lastError = err + results, err := r.Threshold.ShouldAlert(toCommonSeries(series)) + if err != nil { return nil, err } - alerts[h] = &ruletypes.Alert{ - Labels: lbs, - QueryResultLables: resultLabels, - Annotations: annotations, - ActiveAt: ts, - State: model.StatePending, - Value: alertSmpl.V, - GeneratorURL: r.GeneratorURL(), - Receivers: r.preferredChannels, + for _, result := range results { + l := make(map[string]string, len(series.Metric)) + for _, lbl := range series.Metric { + l[lbl.Name] = lbl.Value + } + r.logger.DebugContext(ctx, "alerting for series", "rule_name", r.Name(), "series", series) + + threshold := valueFormatter.Format(r.targetVal(), r.Unit()) + + tmplData := ruletypes.AlertTemplateData(l, valueFormatter.Format(result.V, r.Unit()), threshold) + // Inject some convenience variables that are easier to remember for users + // who are not used to Go's templating system. 
+ defs := "{{$labels := .Labels}}{{$value := .Value}}{{$threshold := .Threshold}}" + + expand := func(text string) string { + + tmpl := ruletypes.NewTemplateExpander( + ctx, + defs+text, + "__alert_"+r.Name(), + tmplData, + times.Time(timestamp.FromTime(ts)), + nil, + ) + result, err := tmpl.Expand() + if err != nil { + result = fmt.Sprintf("", err) + r.logger.WarnContext(ctx, "Expanding alert template failed", "rule_name", r.Name(), "error", err, "data", tmplData) + } + return result + } + + lb := qslabels.NewBuilder(result.Metric).Del(qslabels.MetricNameLabel) + resultLabels := qslabels.NewBuilder(result.Metric).Del(qslabels.MetricNameLabel).Labels() + + for name, value := range r.labels.Map() { + lb.Set(name, expand(value)) + } + + lb.Set(qslabels.AlertNameLabel, r.Name()) + lb.Set(qslabels.AlertRuleIdLabel, r.ID()) + lb.Set(qslabels.RuleSourceLabel, r.GeneratorURL()) + + annotations := make(qslabels.Labels, 0, len(r.annotations.Map())) + for name, value := range r.annotations.Map() { + annotations = append(annotations, qslabels.Label{Name: name, Value: expand(value)}) + } + + lbs := lb.Labels() + h := lbs.Hash() + resultFPs[h] = struct{}{} + + if _, ok := alerts[h]; ok { + err = fmt.Errorf("vector contains metrics with the same labelset after applying alert labels") + // We have already acquired the lock above hence using SetHealth and + // SetLastError will deadlock. 
+ r.health = ruletypes.HealthBad + r.lastError = err + return nil, err + } + + alerts[h] = &ruletypes.Alert{ + Labels: lbs, + QueryResultLables: resultLabels, + Annotations: annotations, + ActiveAt: ts, + State: model.StatePending, + Value: result.V, + GeneratorURL: r.GeneratorURL(), + Receivers: r.preferredChannels, + } } } @@ -327,7 +328,7 @@ func (r *PromRule) String() string { PreferredChannels: r.preferredChannels, } - byt, err := yaml.Marshal(ar) + byt, err := json.Marshal(ar) if err != nil { return fmt.Sprintf("error marshaling alerting rule: %s", err.Error()) } diff --git a/pkg/query-service/rules/promrule_test.go b/pkg/query-service/rules/promrule_test.go index 1ae753634b9e..a4e0b94d06a9 100644 --- a/pkg/query-service/rules/promrule_test.go +++ b/pkg/query-service/rules/promrule_test.go @@ -12,6 +12,17 @@ import ( "github.com/stretchr/testify/assert" ) +func getVectorValues(vectors []ruletypes.Sample) []float64 { + if len(vectors) == 0 { + return []float64{} // Return empty slice instead of nil + } + var values []float64 + for _, v := range vectors { + values = append(values, v.V) + } + return values +} + func TestPromRuleShouldAlert(t *testing.T) { postableRule := ruletypes.PostableRule{ AlertName: "Test Rule", @@ -32,12 +43,13 @@ func TestPromRuleShouldAlert(t *testing.T) { } cases := []struct { - values pql.Series - expectAlert bool - compareOp string - matchType string - target float64 - expectedAlertSample v3.Point + values pql.Series + expectAlert bool + compareOp string + matchType string + target float64 + expectedAlertSample v3.Point + expectedVectorValues []float64 // Expected values in result vector }{ // Test cases for Equals Always { @@ -50,11 +62,12 @@ func TestPromRuleShouldAlert(t *testing.T) { {F: 0.0}, }, }, - expectAlert: true, - compareOp: "3", // Equals - matchType: "2", // Always - target: 0.0, - expectedAlertSample: v3.Point{Value: 0.0}, + expectAlert: true, + compareOp: "3", // Equals + matchType: "2", // Always + target: 0.0, + 
expectedAlertSample: v3.Point{Value: 0.0}, + expectedVectorValues: []float64{0.0}, }, { values: pql.Series{ @@ -66,10 +79,11 @@ func TestPromRuleShouldAlert(t *testing.T) { {F: 1.0}, }, }, - expectAlert: false, - compareOp: "3", // Equals - matchType: "2", // Always - target: 0.0, + expectAlert: false, + compareOp: "3", // Equals + matchType: "2", // Always + target: 0.0, + expectedVectorValues: []float64{}, }, { values: pql.Series{ @@ -81,10 +95,11 @@ func TestPromRuleShouldAlert(t *testing.T) { {F: 1.0}, }, }, - expectAlert: false, - compareOp: "3", // Equals - matchType: "2", // Always - target: 0.0, + expectAlert: false, + compareOp: "3", // Equals + matchType: "2", // Always + target: 0.0, + expectedVectorValues: []float64{}, }, { values: pql.Series{ @@ -112,11 +127,12 @@ func TestPromRuleShouldAlert(t *testing.T) { {F: 0.0}, }, }, - expectAlert: true, - compareOp: "3", // Equals - matchType: "1", // Once - target: 0.0, - expectedAlertSample: v3.Point{Value: 0.0}, + expectAlert: true, + compareOp: "3", // Equals + matchType: "1", // Once + target: 0.0, + expectedAlertSample: v3.Point{Value: 0.0}, + expectedVectorValues: []float64{0.0}, }, { values: pql.Series{ @@ -160,10 +176,11 @@ func TestPromRuleShouldAlert(t *testing.T) { {F: 1.0}, }, }, - expectAlert: false, - compareOp: "3", // Equals - matchType: "1", // Once - target: 0.0, + expectAlert: false, + compareOp: "3", // Equals + matchType: "1", // Once + target: 0.0, + expectedVectorValues: []float64{}, }, // Test cases for Greater Than Always { @@ -176,11 +193,12 @@ func TestPromRuleShouldAlert(t *testing.T) { {F: 2.0}, }, }, - expectAlert: true, - compareOp: "1", // Greater Than - matchType: "2", // Always - target: 1.5, - expectedAlertSample: v3.Point{Value: 2.0}, + expectAlert: true, + compareOp: "1", // Greater Than + matchType: "2", // Always + target: 1.5, + expectedAlertSample: v3.Point{Value: 2.0}, + expectedVectorValues: []float64{2.0}, }, { values: pql.Series{ @@ -240,11 +258,12 @@ func 
TestPromRuleShouldAlert(t *testing.T) { {F: 2.0}, }, }, - expectAlert: true, - compareOp: "1", // Greater Than - matchType: "1", // Once - target: 4.5, - expectedAlertSample: v3.Point{Value: 10.0}, + expectAlert: true, + compareOp: "1", // Greater Than + matchType: "1", // Once + target: 4.5, + expectedAlertSample: v3.Point{Value: 10.0}, + expectedVectorValues: []float64{10.0}, }, { values: pql.Series{ @@ -659,13 +678,49 @@ func TestPromRuleShouldAlert(t *testing.T) { postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp) postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType) postableRule.RuleCondition.Target = &c.target + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + TargetValue: &c.target, + MatchType: ruletypes.MatchType(c.matchType), + CompareOp: ruletypes.CompareOp(c.compareOp), + }, + }, + } rule, err := NewPromRule("69", valuer.GenerateUUID(), &postableRule, logger, nil, nil) if err != nil { assert.NoError(t, err) } - _, shoulAlert := rule.ShouldAlert(toCommonSeries(c.values)) - assert.Equal(t, c.expectAlert, shoulAlert, "Test case %d", idx) + resultVectors, err := rule.Threshold.ShouldAlert(toCommonSeries(c.values)) + assert.NoError(t, err) + + // Compare full result vector with expected vector + actualValues := getVectorValues(resultVectors) + if c.expectedVectorValues != nil { + // If expected vector values are specified, compare them exactly + assert.Equal(t, c.expectedVectorValues, actualValues, "Result vector values don't match expected for case %d", idx) + } else { + // Fallback to the old logic for cases without expectedVectorValues + if c.expectAlert { + assert.NotEmpty(t, resultVectors, "Expected alert but got no result vectors for case %d", idx) + // Verify at least one of the result vectors matches the expected alert sample + if len(resultVectors) > 0 { + found := false + for _, sample := range resultVectors 
{ + if sample.V == c.expectedAlertSample.Value { + found = true + break + } + } + assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. Got values: %v", c.expectedAlertSample.Value, idx, actualValues) + } + } else { + assert.Empty(t, resultVectors, "Expected no alert but got result vectors for case %d", idx) + } + } + } } diff --git a/pkg/query-service/rules/threshold_rule.go b/pkg/query-service/rules/threshold_rule.go index 6828d3221105..b1ebb069056b 100644 --- a/pkg/query-service/rules/threshold_rule.go +++ b/pkg/query-service/rules/threshold_rule.go @@ -38,8 +38,6 @@ import ( querierV5 "github.com/SigNoz/signoz/pkg/querier" qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" - - yaml "gopkg.in/yaml.v2" ) type ThresholdRule struct { @@ -484,10 +482,17 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, } for _, series := range queryResult.Series { - smpl, shouldAlert := r.ShouldAlert(*series) - if shouldAlert { - resultVector = append(resultVector, smpl) + if r.Condition() != nil && r.Condition().RequireMinPoints { + if len(series.Points) < r.ruleCondition.RequiredNumPoints { + r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints) + continue + } } + resultSeries, err := r.Threshold.ShouldAlert(*series) + if err != nil { + return nil, err + } + resultVector = append(resultVector, resultSeries...) 
} return resultVector, nil @@ -554,10 +559,17 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI } for _, series := range queryResult.Series { - smpl, shouldAlert := r.ShouldAlert(*series) - if shouldAlert { - resultVector = append(resultVector, smpl) + if r.Condition() != nil && r.Condition().RequireMinPoints { + if len(series.Points) < r.Condition().RequiredNumPoints { + r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints) + continue + } } + resultSeries, err := r.Threshold.ShouldAlert(*series) + if err != nil { + return nil, err + } + resultVector = append(resultVector, resultSeries...) } return resultVector, nil @@ -597,6 +609,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (interface{}, er } value := valueFormatter.Format(smpl.V, r.Unit()) + //todo(aniket): handle different threshold threshold := valueFormatter.Format(r.targetVal(), r.Unit()) r.logger.DebugContext(ctx, "Alert template data for rule", "rule_name", r.Name(), "formatter", valueFormatter.Name(), "value", value, "threshold", threshold) @@ -777,7 +790,7 @@ func (r *ThresholdRule) String() string { PreferredChannels: r.preferredChannels, } - byt, err := yaml.Marshal(ar) + byt, err := json.Marshal(ar) if err != nil { return fmt.Sprintf("error marshaling alerting rule: %s", err.Error()) } diff --git a/pkg/query-service/rules/threshold_rule_test.go b/pkg/query-service/rules/threshold_rule_test.go index 94b4f7c5c658..d6bc92c8ab44 100644 --- a/pkg/query-service/rules/threshold_rule_test.go +++ b/pkg/query-service/rules/threshold_rule_test.go @@ -801,9 +801,16 @@ func TestThresholdRuleShouldAlert(t *testing.T) { } for idx, c := range cases { - postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp) - postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType) - postableRule.RuleCondition.Target = 
&c.target + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + TargetValue: &c.target, + MatchType: ruletypes.MatchType(c.matchType), + CompareOp: ruletypes.CompareOp(c.compareOp), + }, + }, + } rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute)) if err != nil { @@ -815,10 +822,24 @@ func TestThresholdRuleShouldAlert(t *testing.T) { values.Points[i].Timestamp = time.Now().UnixMilli() } - smpl, shoulAlert := rule.ShouldAlert(c.values) - assert.Equal(t, c.expectAlert, shoulAlert, "Test case %d", idx) - if shoulAlert { - assert.Equal(t, c.expectedAlertSample.Value, smpl.V, "Test case %d", idx) + resultVectors, err := rule.Threshold.ShouldAlert(c.values) + assert.NoError(t, err, "Test case %d", idx) + + // Compare result vectors with expected behavior + if c.expectAlert { + assert.NotEmpty(t, resultVectors, "Expected alert but got no result vectors for case %d", idx) + if len(resultVectors) > 0 { + found := false + for _, sample := range resultVectors { + if sample.V == c.expectedAlertSample.Value { + found = true + break + } + } + assert.True(t, found, "Expected alert sample value %.2f not found in result vectors for case %d. 
Got values: %v", c.expectedAlertSample.Value, idx, getVectorValues(resultVectors)) + } + } else { + assert.Empty(t, resultVectors, "Expected no alert but got result vectors for case %d", idx) } } } @@ -894,7 +915,16 @@ func TestPrepareLinksToLogs(t *testing.T) { } logger := instrumentationtest.New().Logger() - + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + TargetValue: postableRule.RuleCondition.Target, + MatchType: ruletypes.MatchType(postableRule.RuleCondition.MatchType), + CompareOp: ruletypes.CompareOp(postableRule.RuleCondition.CompareOp), + }, + }, + } rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute)) if err != nil { assert.NoError(t, err) @@ -944,7 +974,16 @@ func TestPrepareLinksToLogsV5(t *testing.T) { } logger := instrumentationtest.New().Logger() - + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + TargetValue: postableRule.RuleCondition.Target, + MatchType: ruletypes.MatchType(postableRule.RuleCondition.MatchType), + CompareOp: ruletypes.CompareOp(postableRule.RuleCondition.CompareOp), + }, + }, + } rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute)) if err != nil { assert.NoError(t, err) @@ -994,7 +1033,16 @@ func TestPrepareLinksToTracesV5(t *testing.T) { } logger := instrumentationtest.New().Logger() - + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + TargetValue: postableRule.RuleCondition.Target, + MatchType: ruletypes.MatchType(postableRule.RuleCondition.MatchType), + CompareOp: ruletypes.CompareOp(postableRule.RuleCondition.CompareOp), + }, + }, + } rule, err := NewThresholdRule("69", 
valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute)) if err != nil { assert.NoError(t, err) @@ -1037,7 +1085,16 @@ func TestPrepareLinksToTraces(t *testing.T) { } logger := instrumentationtest.New().Logger() - + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + TargetValue: postableRule.RuleCondition.Target, + MatchType: ruletypes.MatchType(postableRule.RuleCondition.MatchType), + CompareOp: ruletypes.CompareOp(postableRule.RuleCondition.CompareOp), + }, + }, + } rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute)) if err != nil { assert.NoError(t, err) @@ -1111,9 +1168,16 @@ func TestThresholdRuleLabelNormalization(t *testing.T) { logger := instrumentationtest.New().Logger() for idx, c := range cases { - postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp) - postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType) - postableRule.RuleCondition.Target = &c.target + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + TargetValue: &c.target, + MatchType: ruletypes.MatchType(c.matchType), + CompareOp: ruletypes.CompareOp(c.compareOp), + }, + }, + } rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute)) if err != nil { @@ -1125,12 +1189,26 @@ func TestThresholdRuleLabelNormalization(t *testing.T) { values.Points[i].Timestamp = time.Now().UnixMilli() } - sample, shoulAlert := rule.ShouldAlert(c.values) + vector, err := rule.Threshold.ShouldAlert(c.values) + assert.NoError(t, err) + for name, value := range c.values.Labels { - assert.Equal(t, value, sample.Metric.Get(name)) + for _, sample := range vector { + assert.Equal(t, value, sample.Metric.Get(name)) + } } - 
assert.Equal(t, c.expectAlert, shoulAlert, "Test case %d", idx) + // Get result vectors from threshold evaluation + resultVectors, err := rule.Threshold.ShouldAlert(c.values) + assert.NoError(t, err, "Test case %d", idx) + + // Compare result vectors with expected behavior + if c.expectAlert { + assert.NotEmpty(t, resultVectors, "Expected alert but got no result vectors for case %d", idx) + // For this test, we don't have expectedAlertSample, so just verify vectors exist + } else { + assert.Empty(t, resultVectors, "Expected no alert but got result vectors for case %d", idx) + } } } @@ -1152,6 +1230,16 @@ func TestThresholdRuleEvalDelay(t *testing.T) { }, }, } + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + TargetValue: postableRule.RuleCondition.Target, + MatchType: ruletypes.MatchType(postableRule.RuleCondition.MatchType), + CompareOp: ruletypes.CompareOp(postableRule.RuleCondition.CompareOp), + }, + }, + } // 01:39:47 ts := time.Unix(1717205987, 0) @@ -1169,6 +1257,7 @@ func TestThresholdRuleEvalDelay(t *testing.T) { logger := instrumentationtest.New().Logger() for idx, c := range cases { + rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger) // no eval delay if err != nil { assert.NoError(t, err) @@ -1202,6 +1291,16 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) { }, }, } + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + TargetValue: postableRule.RuleCondition.Target, + MatchType: ruletypes.MatchType(postableRule.RuleCondition.MatchType), + CompareOp: ruletypes.CompareOp(postableRule.RuleCondition.CompareOp), + }, + }, + } // 01:39:47 ts := time.Unix(1717205987, 0) @@ -1380,6 +1479,20 @@ func TestThresholdRuleUnitCombinations(t *testing.T) { postableRule.RuleCondition.Target = &c.target 
postableRule.RuleCondition.CompositeQuery.Unit = c.yAxisUnit postableRule.RuleCondition.TargetUnit = c.targetUnit + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + Name: postableRule.AlertName, + TargetValue: &c.target, + TargetUnit: c.targetUnit, + RuleUnit: postableRule.RuleCondition.CompositeQuery.Unit, + MatchType: ruletypes.MatchType(c.matchType), + CompareOp: ruletypes.CompareOp(c.compareOp), + SelectedQuery: postableRule.RuleCondition.SelectedQuery, + }, + }, + } postableRule.Annotations = map[string]string{ "description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})", "summary": "The rule threshold is set to {{$threshold}}, and the observed metric value is {{$value}}", @@ -1475,9 +1588,19 @@ func TestThresholdRuleNoData(t *testing.T) { ExpectQuery(queryString). WillReturnRows(rows) var target float64 = 0 - postableRule.RuleCondition.CompareOp = ruletypes.ValueIsEq - postableRule.RuleCondition.MatchType = ruletypes.AtleastOnce - postableRule.RuleCondition.Target = &target + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + Name: postableRule.AlertName, + TargetValue: &target, + RuleUnit: postableRule.RuleCondition.CompositeQuery.Unit, + MatchType: ruletypes.AtleastOnce, + CompareOp: ruletypes.ValueIsEq, + SelectedQuery: postableRule.RuleCondition.SelectedQuery, + }, + }, + } postableRule.Annotations = map[string]string{ "description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})", "summary": "The rule threshold is set to {{$threshold}}, and the observed metric value is {{$value}}", @@ -1585,6 +1708,20 @@ func TestThresholdRuleTracesLink(t *testing.T) { postableRule.RuleCondition.Target = &c.target 
postableRule.RuleCondition.CompositeQuery.Unit = c.yAxisUnit postableRule.RuleCondition.TargetUnit = c.targetUnit + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + Name: postableRule.AlertName, + TargetValue: &c.target, + TargetUnit: c.targetUnit, + RuleUnit: postableRule.RuleCondition.CompositeQuery.Unit, + MatchType: ruletypes.MatchType(c.matchType), + CompareOp: ruletypes.CompareOp(c.compareOp), + SelectedQuery: postableRule.RuleCondition.SelectedQuery, + }, + }, + } postableRule.Annotations = map[string]string{ "description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})", "summary": "The rule threshold is set to {{$threshold}}, and the observed metric value is {{$value}}", @@ -1708,6 +1845,20 @@ func TestThresholdRuleLogsLink(t *testing.T) { postableRule.RuleCondition.Target = &c.target postableRule.RuleCondition.CompositeQuery.Unit = c.yAxisUnit postableRule.RuleCondition.TargetUnit = c.targetUnit + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + Name: postableRule.AlertName, + TargetValue: &c.target, + TargetUnit: c.targetUnit, + RuleUnit: postableRule.RuleCondition.CompositeQuery.Unit, + MatchType: ruletypes.MatchType(c.matchType), + CompareOp: ruletypes.CompareOp(c.compareOp), + SelectedQuery: postableRule.RuleCondition.SelectedQuery, + }, + }, + } postableRule.Annotations = map[string]string{ "description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})", "summary": "The rule threshold is set to {{$threshold}}, and the observed metric value is {{$value}}", @@ -1756,6 +1907,16 @@ func TestThresholdRuleShiftBy(t *testing.T) { EvalWindow: ruletypes.Duration(5 * time.Minute), Frequency: ruletypes.Duration(1 * time.Minute), 
RuleCondition: &ruletypes.RuleCondition{ + Thresholds: &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + TargetValue: &target, + CompareOp: ruletypes.ValueAboveOrEq, + MatchType: ruletypes.AtleastOnce, + }, + }, + }, CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, BuilderQueries: map[string]*v3.BuilderQuery{ @@ -1787,8 +1948,6 @@ func TestThresholdRuleShiftBy(t *testing.T) { }, }, }, - Target: &target, - CompareOp: ruletypes.ValueAboveOrEq, }, } @@ -1811,3 +1970,188 @@ func TestThresholdRuleShiftBy(t *testing.T) { assert.Equal(t, int64(10), params.CompositeQuery.BuilderQueries["A"].ShiftBy) } + +func TestMultipleThresholdRule(t *testing.T) { + postableRule := ruletypes.PostableRule{ + AlertName: "Mulitple threshold test", + AlertType: ruletypes.AlertTypeMetric, + RuleType: ruletypes.RuleTypeThreshold, + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + RuleCondition: &ruletypes.RuleCondition{ + CompositeQuery: &v3.CompositeQuery{ + QueryType: v3.QueryTypeBuilder, + BuilderQueries: map[string]*v3.BuilderQuery{ + "A": { + QueryName: "A", + StepInterval: 60, + AggregateAttribute: v3.AttributeKey{ + Key: "signoz_calls_total", + }, + AggregateOperator: v3.AggregateOperatorSumRate, + DataSource: v3.DataSourceMetrics, + Expression: "A", + }, + }, + }, + }, + } + telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{}) + + cols := make([]cmock.ColumnType, 0) + cols = append(cols, cmock.ColumnType{Name: "value", Type: "Float64"}) + cols = append(cols, cmock.ColumnType{Name: "attr", Type: "String"}) + cols = append(cols, cmock.ColumnType{Name: "timestamp", Type: "String"}) + + cases := []struct { + targetUnit string + yAxisUnit string + values [][]interface{} + expectAlerts int + compareOp string + matchType string + target float64 + secondTarget float64 + summaryAny []string + }{ + { + targetUnit: "s", + 
yAxisUnit: "ns", + values: [][]interface{}{ + {float64(572588400), "attr", time.Now()}, // 0.57 seconds + {float64(572386400), "attr", time.Now().Add(1 * time.Second)}, // 0.57 seconds + {float64(300947400), "attr", time.Now().Add(2 * time.Second)}, // 0.3 seconds + {float64(299316000), "attr", time.Now().Add(3 * time.Second)}, // 0.3 seconds + {float64(66640400.00000001), "attr", time.Now().Add(4 * time.Second)}, // 0.06 seconds + }, + expectAlerts: 2, + compareOp: "1", // Above + matchType: "1", // Once + target: 1, // 1 second + secondTarget: .5, + summaryAny: []string{ + "observed metric value is 573 ms", + "observed metric value is 572 ms", + }, + }, + { + targetUnit: "ms", + yAxisUnit: "ns", + values: [][]interface{}{ + {float64(572588400), "attr", time.Now()}, // 572.58 ms + {float64(572386400), "attr", time.Now().Add(1 * time.Second)}, // 572.38 ms + {float64(300947400), "attr", time.Now().Add(2 * time.Second)}, // 300.94 ms + {float64(299316000), "attr", time.Now().Add(3 * time.Second)}, // 299.31 ms + {float64(66640400.00000001), "attr", time.Now().Add(4 * time.Second)}, // 66.64 ms + }, + expectAlerts: 6, // Expects 6 values exceed 200ms (572.58, 572.38, 300.94, 299.31) + 2 values exceed 500ms (572.58, 572.38) + compareOp: "1", // Above + matchType: "1", // Once + target: 200, // 200 ms + secondTarget: 500, + summaryAny: []string{ + "observed metric value is 299 ms", + "the observed metric value is 573 ms", + "the observed metric value is 572 ms", + "the observed metric value is 301 ms", + }, + }, + { + targetUnit: "decgbytes", + yAxisUnit: "bytes", + values: [][]interface{}{ + {float64(2863284053), "attr", time.Now()}, // 2.86 GB + {float64(2863388842), "attr", time.Now().Add(1 * time.Second)}, // 2.86 GB + {float64(300947400), "attr", time.Now().Add(2 * time.Second)}, // 0.3 GB + {float64(299316000), "attr", time.Now().Add(3 * time.Second)}, // 0.3 GB + {float64(66640400.00000001), "attr", time.Now().Add(4 * time.Second)}, // 66.64 MB + }, + 
expectAlerts: 2, + compareOp: "1", // Above + matchType: "1", // Once + target: 200, // 200 GB + secondTarget: 2, // 2GB + summaryAny: []string{ + "observed metric value is 2.7 GiB", + "the observed metric value is 0.3 GB", + }, + }, + } + + logger := instrumentationtest.New().Logger() + + for idx, c := range cases { + rows := cmock.NewRows(cols, c.values) + // We are testing the eval logic after the query is run + // so we don't care about the query string here + queryString := "SELECT any" + telemetryStore.Mock(). + ExpectQuery(queryString). + WillReturnRows(rows) + postableRule.RuleCondition.CompareOp = ruletypes.CompareOp(c.compareOp) + postableRule.RuleCondition.MatchType = ruletypes.MatchType(c.matchType) + postableRule.RuleCondition.Target = &c.target + postableRule.RuleCondition.CompositeQuery.Unit = c.yAxisUnit + postableRule.RuleCondition.TargetUnit = c.targetUnit + postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{ + Kind: ruletypes.BasicThresholdKind, + Spec: ruletypes.BasicRuleThresholds{ + { + Name: "first_threshold", + TargetValue: &c.target, + TargetUnit: c.targetUnit, + RuleUnit: postableRule.RuleCondition.CompositeQuery.Unit, + MatchType: ruletypes.MatchType(c.matchType), + CompareOp: ruletypes.CompareOp(c.compareOp), + SelectedQuery: postableRule.RuleCondition.SelectedQuery, + }, + { + Name: "second_threshold", + TargetValue: &c.secondTarget, + TargetUnit: c.targetUnit, + RuleUnit: postableRule.RuleCondition.CompositeQuery.Unit, + MatchType: ruletypes.MatchType(c.matchType), + CompareOp: ruletypes.CompareOp(c.compareOp), + SelectedQuery: postableRule.RuleCondition.SelectedQuery, + }, + }, + } + postableRule.Annotations = map[string]string{ + "description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})", + "summary": "The rule threshold is set to {{$threshold}}, and the observed metric value is {{$value}}", + } + + options := clickhouseReader.NewOptions("", "", 
"archiveNamespace") + readerCache, err := cachetest.New(cache.Config{Provider: "memory", Memory: cache.Memory{TTL: DefaultFrequency}}) + require.NoError(t, err) + reader := clickhouseReader.NewReaderFromClickhouseConnection(options, nil, telemetryStore, prometheustest.New(instrumentationtest.New().Logger(), prometheus.Config{}), "", time.Duration(time.Second), readerCache) + rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, reader, nil, logger) + rule.TemporalityMap = map[string]map[v3.Temporality]bool{ + "signoz_calls_total": { + v3.Delta: true, + }, + } + if err != nil { + assert.NoError(t, err) + } + + retVal, err := rule.Eval(context.Background(), time.Now()) + if err != nil { + assert.NoError(t, err) + } + + assert.Equal(t, c.expectAlerts, retVal.(int), "case %d", idx) + if c.expectAlerts != 0 { + foundCount := 0 + for _, item := range rule.Active { + for _, summary := range c.summaryAny { + if strings.Contains(item.Annotations.Get("summary"), summary) { + foundCount++ + break + } + } + } + assert.Equal(t, c.expectAlerts, foundCount, "case %d", idx) + } + } +} diff --git a/pkg/types/ruletypes/alerting.go b/pkg/types/ruletypes/alerting.go index 32f276163651..52f56c479905 100644 --- a/pkg/types/ruletypes/alerting.go +++ b/pkg/types/ruletypes/alerting.go @@ -11,7 +11,6 @@ import ( "github.com/SigNoz/signoz/pkg/query-service/model" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" "github.com/SigNoz/signoz/pkg/query-service/utils/labels" - qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5" ) @@ -106,18 +105,19 @@ const ( ) type RuleCondition struct { - CompositeQuery *v3.CompositeQuery `json:"compositeQuery,omitempty" yaml:"compositeQuery,omitempty"` - CompareOp CompareOp `yaml:"op,omitempty" json:"op,omitempty"` - Target *float64 `yaml:"target,omitempty" json:"target,omitempty"` - AlertOnAbsent bool `yaml:"alertOnAbsent,omitempty" json:"alertOnAbsent,omitempty"` - AbsentFor uint64 
`yaml:"absentFor,omitempty" json:"absentFor,omitempty"` + CompositeQuery *v3.CompositeQuery `json:"compositeQuery,omitempty"` + CompareOp CompareOp `json:"op,omitempty"` + Target *float64 `json:"target,omitempty"` + AlertOnAbsent bool `json:"alertOnAbsent,omitempty"` + AbsentFor uint64 `json:"absentFor,omitempty"` MatchType MatchType `json:"matchType,omitempty"` TargetUnit string `json:"targetUnit,omitempty"` Algorithm string `json:"algorithm,omitempty"` Seasonality string `json:"seasonality,omitempty"` SelectedQuery string `json:"selectedQueryName,omitempty"` - RequireMinPoints bool `yaml:"requireMinPoints,omitempty" json:"requireMinPoints,omitempty"` - RequiredNumPoints int `yaml:"requiredNumPoints,omitempty" json:"requiredNumPoints,omitempty"` + RequireMinPoints bool `json:"requireMinPoints,omitempty"` + RequiredNumPoints int `json:"requiredNumPoints,omitempty"` + Thresholds *RuleThresholdData `json:"thresholds,omitempty"` } func (rc *RuleCondition) GetSelectedQueryName() string { @@ -188,10 +188,7 @@ func (rc *RuleCondition) IsValid() bool { } if rc.QueryType() == v3.QueryTypeBuilder { - if rc.Target == nil { - return false - } - if rc.CompareOp == "" { + if rc.Thresholds == nil { return false } } diff --git a/pkg/types/ruletypes/api_params.go b/pkg/types/ruletypes/api_params.go index b6be66d93149..f4ad6b55cd2b 100644 --- a/pkg/types/ruletypes/api_params.go +++ b/pkg/types/ruletypes/api_params.go @@ -3,18 +3,15 @@ package ruletypes import ( "context" "encoding/json" - "fmt" "time" "unicode/utf8" + signozError "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/query-service/model" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" - "github.com/pkg/errors" - "go.uber.org/multierr" "github.com/SigNoz/signoz/pkg/query-service/utils/times" "github.com/SigNoz/signoz/pkg/query-service/utils/timestamp" - yaml "gopkg.in/yaml.v2" ) type AlertType string @@ -30,30 +27,20 @@ type RuleDataKind string const ( RuleDataKindJson RuleDataKind = "json" - 
RuleDataKindYaml RuleDataKind = "yaml" ) -var ( - ErrFailedToParseJSON = errors.New("failed to parse json") - ErrFailedToParseYAML = errors.New("failed to parse yaml") - ErrInvalidDataType = errors.New("invalid data type") -) - -// this file contains api request and responses to be -// served over http - // PostableRule is used to create alerting rule from HTTP api type PostableRule struct { - AlertName string `yaml:"alert,omitempty" json:"alert,omitempty"` - AlertType AlertType `yaml:"alertType,omitempty" json:"alertType,omitempty"` - Description string `yaml:"description,omitempty" json:"description,omitempty"` - RuleType RuleType `yaml:"ruleType,omitempty" json:"ruleType,omitempty"` - EvalWindow Duration `yaml:"evalWindow,omitempty" json:"evalWindow,omitempty"` - Frequency Duration `yaml:"frequency,omitempty" json:"frequency,omitempty"` + AlertName string `json:"alert,omitempty"` + AlertType AlertType `json:"alertType,omitempty"` + Description string `json:"description,omitempty"` + RuleType RuleType `json:"ruleType,omitempty"` + EvalWindow Duration `json:"evalWindow,omitempty"` + Frequency Duration `json:"frequency,omitempty"` - RuleCondition *RuleCondition `yaml:"condition,omitempty" json:"condition,omitempty"` - Labels map[string]string `yaml:"labels,omitempty" json:"labels,omitempty"` - Annotations map[string]string `yaml:"annotations,omitempty" json:"annotations,omitempty"` + RuleCondition *RuleCondition `json:"condition,omitempty"` + Labels map[string]string `json:"labels,omitempty"` + Annotations map[string]string `json:"annotations,omitempty"` Disabled bool `json:"disabled"` @@ -63,84 +50,65 @@ type PostableRule struct { PreferredChannels []string `json:"preferredChannels,omitempty"` Version string `json:"version,omitempty"` - - // legacy - Expr string `yaml:"expr,omitempty" json:"expr,omitempty"` - OldYaml string `json:"yaml,omitempty"` } -func ParsePostableRule(content []byte) (*PostableRule, error) { - return ParsePostableRuleWithKind(content, "json") 
-} +func (r *PostableRule) processRuleDefaults() error { -func ParsePostableRuleWithKind(content []byte, kind RuleDataKind) (*PostableRule, error) { - return ParseIntoRule(PostableRule{}, content, kind) -} - -// parseIntoRule loads the content (data) into PostableRule and also -// validates the end result -func ParseIntoRule(initRule PostableRule, content []byte, kind RuleDataKind) (*PostableRule, error) { - rule := &initRule - - var err error - if kind == RuleDataKindJson { - if err = json.Unmarshal(content, rule); err != nil { - return nil, ErrFailedToParseJSON - } - } else if kind == RuleDataKindYaml { - if err = yaml.Unmarshal(content, rule); err != nil { - return nil, ErrFailedToParseYAML - } - } else { - return nil, ErrInvalidDataType + if r.EvalWindow == 0 { + r.EvalWindow = Duration(5 * time.Minute) } - if rule.RuleCondition == nil && rule.Expr != "" { - // account for legacy rules - rule.RuleType = RuleTypeProm - rule.EvalWindow = Duration(5 * time.Minute) - rule.Frequency = Duration(1 * time.Minute) - rule.RuleCondition = &RuleCondition{ - CompositeQuery: &v3.CompositeQuery{ - QueryType: v3.QueryTypePromQL, - PromQueries: map[string]*v3.PromQuery{ - "A": { - Query: rule.Expr, - }, - }, - }, - } + if r.Frequency == 0 { + r.Frequency = Duration(1 * time.Minute) } - if rule.EvalWindow == 0 { - rule.EvalWindow = Duration(5 * time.Minute) - } - - if rule.Frequency == 0 { - rule.Frequency = Duration(1 * time.Minute) - } - - if rule.RuleCondition != nil { - if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeBuilder { - if rule.RuleType == "" { - rule.RuleType = RuleTypeThreshold + if r.RuleCondition != nil { + if r.RuleCondition.CompositeQuery.QueryType == v3.QueryTypeBuilder { + if r.RuleType == "" { + r.RuleType = RuleTypeThreshold } - } else if rule.RuleCondition.CompositeQuery.QueryType == v3.QueryTypePromQL { - rule.RuleType = RuleTypeProm + } else if r.RuleCondition.CompositeQuery.QueryType == v3.QueryTypePromQL { + r.RuleType = RuleTypeProm } - 
for qLabel, q := range rule.RuleCondition.CompositeQuery.BuilderQueries { + for qLabel, q := range r.RuleCondition.CompositeQuery.BuilderQueries { if q.AggregateAttribute.Key != "" && q.Expression == "" { q.Expression = qLabel } } + //added alerts v2 fields + if r.RuleCondition.Thresholds == nil { + thresholdName := CriticalThresholdName + if r.Labels != nil { + if severity, ok := r.Labels["severity"]; ok { + thresholdName = severity + } + } + thresholdData := RuleThresholdData{ + Kind: BasicThresholdKind, + Spec: BasicRuleThresholds{{ + Name: thresholdName, + RuleUnit: r.RuleCondition.CompositeQuery.Unit, + TargetUnit: r.RuleCondition.TargetUnit, + TargetValue: r.RuleCondition.Target, + MatchType: r.RuleCondition.MatchType, + CompareOp: r.RuleCondition.CompareOp, + }}, + } + r.RuleCondition.Thresholds = &thresholdData + } } - if err := rule.Validate(); err != nil { - return nil, err - } + return r.Validate() +} - return rule, nil +func (r *PostableRule) UnmarshalJSON(bytes []byte) error { + type Alias PostableRule + aux := (*Alias)(r) + if err := json.Unmarshal(bytes, aux); err != nil { + return signozError.NewInvalidInputf(signozError.CodeInvalidInput, "failed to parse json: %v", err) + } + return r.processRuleDefaults() } func isValidLabelName(ln string) bool { @@ -204,47 +172,35 @@ func (r *PostableRule) Validate() error { if r.RuleCondition == nil { // will get panic if we try to access CompositeQuery, so return here - return errors.Errorf("rule condition is required") + return signozError.NewInvalidInputf(signozError.CodeInvalidInput, "rule condition is required") } else { if r.RuleCondition.CompositeQuery == nil { - errs = append(errs, errors.Errorf("composite metric query is required")) + errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "composite metric query is required")) } } if isAllQueriesDisabled(r.RuleCondition.CompositeQuery) { - errs = append(errs, errors.Errorf("all queries are disabled in rule condition")) - } - - if 
r.RuleType == RuleTypeThreshold { - if r.RuleCondition.Target == nil { - errs = append(errs, errors.Errorf("rule condition missing the threshold")) - } - if r.RuleCondition.CompareOp == "" { - errs = append(errs, errors.Errorf("rule condition missing the compare op")) - } - if r.RuleCondition.MatchType == "" { - errs = append(errs, errors.Errorf("rule condition missing the match option")) - } + errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "all queries are disabled in rule condition")) } for k, v := range r.Labels { if !isValidLabelName(k) { - errs = append(errs, errors.Errorf("invalid label name: %s", k)) + errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "invalid label name: %s", k)) } if !isValidLabelValue(v) { - errs = append(errs, errors.Errorf("invalid label value: %s", v)) + errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "invalid label value: %s", v)) } } for k := range r.Annotations { if !isValidLabelName(k) { - errs = append(errs, errors.Errorf("invalid annotation name: %s", k)) + errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "invalid annotation name: %s", k)) } } errs = append(errs, testTemplateParsing(r)...) - return multierr.Combine(errs...) + return signozError.Join(errs...) 
} func testTemplateParsing(rl *PostableRule) (errs []error) { @@ -272,7 +228,7 @@ func testTemplateParsing(rl *PostableRule) (errs []error) { for _, val := range rl.Labels { err := parseTest(val) if err != nil { - errs = append(errs, fmt.Errorf("msg=%s", err.Error())) + errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "template parsing error: %s", err.Error())) } } @@ -280,7 +236,7 @@ func testTemplateParsing(rl *PostableRule) (errs []error) { for _, val := range rl.Annotations { err := parseTest(val) if err != nil { - errs = append(errs, fmt.Errorf("msg=%s", err.Error())) + errs = append(errs, signozError.NewInvalidInputf(signozError.CodeInvalidInput, "template parsing error: %s", err.Error())) } } diff --git a/pkg/types/ruletypes/api_params_test.go b/pkg/types/ruletypes/api_params_test.go index 44968e3fd137..27ec5883714e 100644 --- a/pkg/types/ruletypes/api_params_test.go +++ b/pkg/types/ruletypes/api_params_test.go @@ -1,7 +1,10 @@ package ruletypes import ( + "encoding/json" + "github.com/stretchr/testify/assert" "testing" + "time" v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3" ) @@ -84,3 +87,307 @@ func TestIsAllQueriesDisabled(t *testing.T) { } } } + +func TestParseIntoRule(t *testing.T) { + tests := []struct { + name string + initRule PostableRule + content []byte + kind RuleDataKind + expectError bool + validate func(*testing.T, *PostableRule) + }{ + { + name: "valid JSON with complete rule", + initRule: PostableRule{}, + content: []byte(`{ + "alert": "TestAlert", + "alertType": "METRIC_BASED_ALERT", + "description": "Test description", + "ruleType": "threshold_rule", + "evalWindow": "5m", + "frequency": "1m", + "condition": { + "compositeQuery": { + "queryType": "builder", + "builderQueries": { + "A": { + "expression": "A", + "disabled": false, + "aggregateAttribute": { + "key": "test_metric" + } + } + } + }, + "target": 10.0, + "matchType": "1", + "op": "1", + "selectedQuery": "A" + } + }`), + kind: 
RuleDataKindJson, + expectError: false, + validate: func(t *testing.T, rule *PostableRule) { + if rule.AlertName != "TestAlert" { + t.Errorf("Expected alert name 'TestAlert', got '%s'", rule.AlertName) + } + if rule.RuleType != RuleTypeThreshold { + t.Errorf("Expected rule type '%s', got '%s'", RuleTypeThreshold, rule.RuleType) + } + if rule.RuleCondition.Thresholds.Kind.IsZero() { + t.Error("Expected thresholds to be populated") + } + if rule.RuleCondition.Target == nil { + t.Error("Expected target to be populated") + } + }, + }, + { + name: "rule with default values applied", + initRule: PostableRule{}, + content: []byte(`{ + "alert": "DefaultsRule", + "ruleType": "threshold_rule", + "condition": { + "compositeQuery": { + "queryType": "builder", + "builderQueries": { + "A": { + "disabled": false, + "aggregateAttribute": { + "key": "test_metric" + } + } + } + }, + "target": 5.0, + "matchType": "1", + "op": "1", + "selectedQuery": "A" + } + }`), + kind: RuleDataKindJson, + expectError: false, + validate: func(t *testing.T, rule *PostableRule) { + if rule.EvalWindow != Duration(5*time.Minute) { + t.Errorf("Expected default eval window '5m', got '%v'", rule.EvalWindow) + } + if rule.Frequency != Duration(1*time.Minute) { + t.Errorf("Expected default frequency '1m', got '%v'", rule.Frequency) + } + if rule.RuleCondition.CompositeQuery.BuilderQueries["A"].Expression != "A" { + t.Errorf("Expected expression 'A', got '%s'", rule.RuleCondition.CompositeQuery.BuilderQueries["A"].Expression) + } + }, + }, + { + name: "PromQL rule type detection", + initRule: PostableRule{}, + content: []byte(`{ + "alert": "PromQLRule", + "condition": { + "compositeQuery": { + "queryType": "promql", + "promQueries": { + "A": { + "query": "rate(http_requests_total[5m])", + "disabled": false + } + } + }, + "target": 10.0, + "matchType": "1", + "op": "1", + "selectedQuery": "A" + } + }`), + kind: RuleDataKindJson, + expectError: false, + validate: func(t *testing.T, rule *PostableRule) { + if 
rule.RuleType != RuleTypeProm { + t.Errorf("Expected rule type 'PROM_QL_RULE', got '%s'", rule.RuleType) + } + if rule.RuleCondition.Thresholds.Kind.IsZero() { + t.Error("Expected thresholds to be populated") + } + if rule.RuleCondition.Target == nil { + t.Error("Expected target to be populated") + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := json.Unmarshal(tt.content, &tt.initRule) + if tt.expectError { + if err == nil { + t.Error("Expected error but got none") + } + return + } + + if err != nil { + t.Errorf("Unexpected error: %v", err) + return + } + + if tt.validate != nil { + tt.validate(t, &tt.initRule) + } + }) + } +} + +func TestParseIntoRuleThresholdGeneration(t *testing.T) { + content := []byte(`{ + "alert": "TestThresholds", + "condition": { + "compositeQuery": { + "queryType": "builder", + "builderQueries": { + "A": { + "expression": "A", + "disabled": false, + "aggregateAttribute": { + "key": "response_time" + } + } + } + }, + "target": 100.0, + "matchType": "1", + "op": "1", + "selectedQuery": "A", + "targetUnit": "ms", + "thresholds": { + "kind": "basic", + "spec": [ + { + "name": "CRITICAL", + "target": 100.0, + "targetUnit": "ms", + "ruleUnit": "s", + "matchType": "1", + "op": "1", + "selectedQuery": "A" + } + ] + } + } + }`) + rule := PostableRule{} + err := json.Unmarshal(content, &rule) + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + + // Check that thresholds were parsed correctly + if rule.RuleCondition.Thresholds.Kind != BasicThresholdKind { + t.Errorf("Expected threshold kind 'basic', got '%s'", rule.RuleCondition.Thresholds.Kind) + } + + // Get the threshold and test functionality + threshold, err := rule.RuleCondition.Thresholds.GetRuleThreshold() + if err != nil { + t.Fatalf("Failed to get threshold: %v", err) + } + + // Test that threshold can evaluate properly + vector, err := threshold.ShouldAlert(v3.Series{ + Points: []v3.Point{{Value: 0.15, Timestamp: 1000}}, // 150ms in 
seconds + Labels: map[string]string{"test": "label"}, + }) + if err != nil { + t.Fatalf("Unexpected error in ShouldAlert: %v", err) + } + + if len(vector) == 0 { + t.Error("Expected alert to be triggered for value above threshold") + } +} + +func TestParseIntoRuleMultipleThresholds(t *testing.T) { + content := []byte(`{ + "alert": "MultiThresholdAlert", + "ruleType": "threshold_rule", + "condition": { + "compositeQuery": { + "queryType": "builder", + "unit": "%", + "builderQueries": { + "A": { + "expression": "A", + "disabled": false, + "aggregateAttribute": { + "key": "cpu_usage" + } + } + } + }, + "target": 90.0, + "matchType": "1", + "op": "1", + "selectedQuery": "A", + "thresholds": { + "kind": "basic", + "spec": [ + { + "name": "WARNING", + "target": 70.0, + "targetUnit": "%", + "ruleUnit": "%", + "matchType": "1", + "op": "1", + "selectedQuery": "A" + }, + { + "name": "CRITICAL", + "target": 90.0, + "targetUnit": "%", + "ruleUnit": "%", + "matchType": "1", + "op": "1", + "selectedQuery": "A" + } + ] + } + } + }`) + rule := PostableRule{} + err := json.Unmarshal(content, &rule) + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + + if rule.RuleCondition.Thresholds.Kind != BasicThresholdKind { + t.Errorf("Expected threshold kind 'basic', got '%s'", rule.RuleCondition.Thresholds.Kind) + } + + threshold, err := rule.RuleCondition.Thresholds.GetRuleThreshold() + if err != nil { + t.Fatalf("Failed to get threshold: %v", err) + } + + // Test with a value that should trigger both WARNING and CRITICAL thresholds + vector, err := threshold.ShouldAlert(v3.Series{ + Points: []v3.Point{{Value: 95.0, Timestamp: 1000}}, // 95% CPU usage + Labels: map[string]string{"service": "test"}, + }) + if err != nil { + t.Fatalf("Unexpected error in ShouldAlert: %v", err) + } + + assert.Equal(t, 2, len(vector)) + + vector, err = threshold.ShouldAlert(v3.Series{ + Points: []v3.Point{{Value: 75.0, Timestamp: 1000}}, // 75% CPU usage + Labels: map[string]string{"service": 
"test"}, + }) + if err != nil { + t.Fatalf("Unexpected error in ShouldAlert: %v", err) + } + + assert.Equal(t, 1, len(vector)) +} diff --git a/pkg/types/ruletypes/constants.go b/pkg/types/ruletypes/constants.go new file mode 100644 index 000000000000..1851ef919a11 --- /dev/null +++ b/pkg/types/ruletypes/constants.go @@ -0,0 +1,4 @@ +package ruletypes + +const CriticalThresholdName = "CRITICAL" +const LabelThresholdName = "threshold.name" diff --git a/pkg/types/ruletypes/threshold.go b/pkg/types/ruletypes/threshold.go new file mode 100644 index 000000000000..4c47c790dc40 --- /dev/null +++ b/pkg/types/ruletypes/threshold.go @@ -0,0 +1,419 @@ +package ruletypes + +import ( + "encoding/json" + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/query-service/converter" + "github.com/SigNoz/signoz/pkg/query-service/model/v3" + "github.com/SigNoz/signoz/pkg/query-service/utils/labels" + "github.com/SigNoz/signoz/pkg/valuer" + "math" + "sort" +) + +type ThresholdKind struct { + valuer.String +} + +var ( + BasicThresholdKind = ThresholdKind{valuer.NewString("basic")} +) + +type RuleThresholdData struct { + Kind ThresholdKind `json:"kind"` + Spec any `json:"spec"` +} + +func (r *RuleThresholdData) UnmarshalJSON(data []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to unmarshal raw rule threshold json: %v", err) + } + if err := json.Unmarshal(raw["kind"], &r.Kind); err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to unmarshal rule threshold kind: %v", err) + } + switch r.Kind { + case BasicThresholdKind: + var basicThresholds BasicRuleThresholds + if err := json.Unmarshal(raw["spec"], &basicThresholds); err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to unmarshal rule threhsold spec: %v", err) + } + if err := basicThresholds.Validate(); err != nil { + return 
errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid rule threshold spec: %v", err) + } + r.Spec = basicThresholds + + default: + return errors.NewInvalidInputf(errors.CodeUnsupported, "unknown threshold kind") + } + + return nil +} + +type RuleThreshold interface { + ShouldAlert(series v3.Series) (Vector, error) +} + +type BasicRuleThreshold struct { + Name string `json:"name"` + TargetValue *float64 `json:"target"` + TargetUnit string `json:"targetUnit"` + RuleUnit string `json:"ruleUnit"` + RecoveryTarget *float64 `json:"recoveryTarget"` + MatchType MatchType `json:"matchType"` + CompareOp CompareOp `json:"op"` + SelectedQuery string `json:"selectedQuery"` +} + +type BasicRuleThresholds []BasicRuleThreshold + +func (r BasicRuleThresholds) Validate() error { + var errs []error + for _, basicThreshold := range r { + if err := basicThreshold.Validate(); err != nil { + errs = append(errs, err) + } + } + return errors.Join(errs...) +} + +func (r BasicRuleThresholds) ShouldAlert(series v3.Series) (Vector, error) { + var resultVector Vector + thresholds := []BasicRuleThreshold(r) + sort.Slice(thresholds, func(i, j int) bool { + compareOp := thresholds[i].GetCompareOp() + targetI := thresholds[i].Target() + targetJ := thresholds[j].Target() + + switch compareOp { + case ValueIsAbove, ValueAboveOrEq, ValueOutsideBounds: + // For "above" operations, sort descending (higher values first) + return targetI > targetJ + case ValueIsBelow, ValueBelowOrEq: + // For "below" operations, sort ascending (lower values first) + return targetI < targetJ + default: + // For equal/not equal operations, use descending as default + return targetI > targetJ + } + }) + for _, threshold := range thresholds { + smpl, shouldAlert := threshold.ShouldAlert(series) + if shouldAlert { + resultVector = append(resultVector, smpl) + } + } + return resultVector, nil +} + +func (b BasicRuleThreshold) GetName() string { + return b.Name +} + +func (b BasicRuleThreshold) Target() float64 { + 
unitConverter := converter.FromUnit(converter.Unit(b.TargetUnit)) + // convert the target value to the y-axis unit + value := unitConverter.Convert(converter.Value{ + F: *b.TargetValue, + U: converter.Unit(b.TargetUnit), + }, converter.Unit(b.RuleUnit)) + return value.F +} + +func (b BasicRuleThreshold) GetRecoveryTarget() float64 { + if b.RecoveryTarget == nil { + return 0 + } else { + return *b.RecoveryTarget + } +} + +func (b BasicRuleThreshold) GetMatchType() MatchType { + return b.MatchType +} + +func (b BasicRuleThreshold) GetCompareOp() CompareOp { + return b.CompareOp +} + +func (b BasicRuleThreshold) GetSelectedQuery() string { + return b.SelectedQuery +} + +func (b BasicRuleThreshold) Validate() error { + var errs []error + if b.Name == "" { + errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "threshold name cannot be empty")) + } + + if b.TargetValue == nil { + errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "target value cannot be nil")) + } + + switch b.CompareOp { + case ValueIsAbove, ValueIsBelow, ValueIsEq, ValueIsNotEq, ValueAboveOrEq, ValueBelowOrEq, ValueOutsideBounds: + // valid compare operations + case CompareOpNone: + errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "compare operation cannot be none")) + default: + errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid compare operation: %s", string(b.CompareOp))) + } + + switch b.MatchType { + case AtleastOnce, AllTheTimes, OnAverage, InTotal, Last: + // valid match types + case MatchTypeNone: + errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "match type cannot be none")) + default: + errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid match type: %s", string(b.MatchType))) + } + + return errors.Join(errs...) 
+} + +func removeGroupinSetPoints(series v3.Series) []v3.Point { + var result []v3.Point + for _, s := range series.Points { + if s.Timestamp >= 0 && !math.IsNaN(s.Value) && !math.IsInf(s.Value, 0) { + result = append(result, s) + } + } + return result +} + +func (b BasicRuleThreshold) ShouldAlert(series v3.Series) (Sample, bool) { + var shouldAlert bool + var alertSmpl Sample + var lbls labels.Labels + + for name, value := range series.Labels { + lbls = append(lbls, labels.Label{Name: name, Value: value}) + } + + lbls = append(lbls, labels.Label{Name: LabelThresholdName, Value: b.Name}) + + series.Points = removeGroupinSetPoints(series) + + // nothing to evaluate + if len(series.Points) == 0 { + return alertSmpl, false + } + + switch b.MatchType { + case AtleastOnce: + // If any sample matches the condition, the rule is firing. + if b.CompareOp == ValueIsAbove { + for _, smpl := range series.Points { + if smpl.Value > b.Target() { + alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls} + shouldAlert = true + break + } + } + } else if b.CompareOp == ValueIsBelow { + for _, smpl := range series.Points { + if smpl.Value < b.Target() { + alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls} + shouldAlert = true + break + } + } + } else if b.CompareOp == ValueIsEq { + for _, smpl := range series.Points { + if smpl.Value == b.Target() { + alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls} + shouldAlert = true + break + } + } + } else if b.CompareOp == ValueIsNotEq { + for _, smpl := range series.Points { + if smpl.Value != b.Target() { + alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls} + shouldAlert = true + break + } + } + } else if b.CompareOp == ValueOutsideBounds { + for _, smpl := range series.Points { + if math.Abs(smpl.Value) >= b.Target() { + alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls} + shouldAlert = true + break + } + } + } + case AllTheTimes: + // If all samples match the condition, the rule is firing. 
+ shouldAlert = true + alertSmpl = Sample{Point: Point{V: b.Target()}, Metric: lbls} + if b.CompareOp == ValueIsAbove { + for _, smpl := range series.Points { + if smpl.Value <= b.Target() { + shouldAlert = false + break + } + } + // use min value from the series + if shouldAlert { + var minValue float64 = math.Inf(1) + for _, smpl := range series.Points { + if smpl.Value < minValue { + minValue = smpl.Value + } + } + alertSmpl = Sample{Point: Point{V: minValue}, Metric: lbls} + } + } else if b.CompareOp == ValueIsBelow { + for _, smpl := range series.Points { + if smpl.Value >= b.Target() { + shouldAlert = false + break + } + } + if shouldAlert { + var maxValue float64 = math.Inf(-1) + for _, smpl := range series.Points { + if smpl.Value > maxValue { + maxValue = smpl.Value + } + } + alertSmpl = Sample{Point: Point{V: maxValue}, Metric: lbls} + } + } else if b.CompareOp == ValueIsEq { + for _, smpl := range series.Points { + if smpl.Value != b.Target() { + shouldAlert = false + break + } + } + } else if b.CompareOp == ValueIsNotEq { + for _, smpl := range series.Points { + if smpl.Value == b.Target() { + shouldAlert = false + break + } + } + // use any non-inf or nan value from the series + if shouldAlert { + for _, smpl := range series.Points { + if !math.IsInf(smpl.Value, 0) && !math.IsNaN(smpl.Value) { + alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls} + break + } + } + } + } else if b.CompareOp == ValueOutsideBounds { + for _, smpl := range series.Points { + if math.Abs(smpl.Value) < b.Target() { + alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls} + shouldAlert = false + break + } + } + } + case OnAverage: + // If the average of all samples matches the condition, the rule is firing. 
+ var sum, count float64 + for _, smpl := range series.Points { + if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) { + continue + } + sum += smpl.Value + count++ + } + avg := sum / count + alertSmpl = Sample{Point: Point{V: avg}, Metric: lbls} + if b.CompareOp == ValueIsAbove { + if avg > b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueIsBelow { + if avg < b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueIsEq { + if avg == b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueIsNotEq { + if avg != b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueOutsideBounds { + if math.Abs(avg) >= b.Target() { + shouldAlert = true + } + } + case InTotal: + // If the sum of all samples matches the condition, the rule is firing. + var sum float64 + + for _, smpl := range series.Points { + if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) { + continue + } + sum += smpl.Value + } + alertSmpl = Sample{Point: Point{V: sum}, Metric: lbls} + if b.CompareOp == ValueIsAbove { + if sum > b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueIsBelow { + if sum < b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueIsEq { + if sum == b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueIsNotEq { + if sum != b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueOutsideBounds { + if math.Abs(sum) >= b.Target() { + shouldAlert = true + } + } + case Last: + // If the last sample matches the condition, the rule is firing. 
+ shouldAlert = false + alertSmpl = Sample{Point: Point{V: series.Points[len(series.Points)-1].Value}, Metric: lbls} + if b.CompareOp == ValueIsAbove { + if series.Points[len(series.Points)-1].Value > b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueIsBelow { + if series.Points[len(series.Points)-1].Value < b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueIsEq { + if series.Points[len(series.Points)-1].Value == b.Target() { + shouldAlert = true + } + } else if b.CompareOp == ValueIsNotEq { + if series.Points[len(series.Points)-1].Value != b.Target() { + shouldAlert = true + } + } + } + return alertSmpl, shouldAlert +} + +func (r *RuleThresholdData) GetRuleThreshold() (RuleThreshold, error) { + if r == nil { + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "rule threshold is nil") + } + switch r.Kind { + case BasicThresholdKind: + if thresholds, ok := r.Spec.(BasicRuleThresholds); ok { + basic := BasicRuleThresholds(thresholds) + return basic, nil + } + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid rule threshold spec") + default: + return nil, errors.NewInvalidInputf(errors.CodeUnsupported, "unknown threshold kind") + } +} From e9222ab3e0db07660463bbc6e08c2654793131d2 Mon Sep 17 00:00:00 2001 From: Vibhu Pandey Date: Fri, 12 Sep 2025 17:46:04 +0530 Subject: [PATCH 18/51] docs(integration): add docs for writing integration tests (#9070) --- CONTRIBUTING.md | 3 +- docs/contributing/go/integration.md | 213 ++++++++++++++++++++++++++++ 2 files changed, 215 insertions(+), 1 deletion(-) create mode 100644 docs/contributing/go/integration.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4a2a85dc0dd5..8f49cccf6063 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -78,4 +78,5 @@ Need assistance? 
Join our Slack community: - Set up your [development environment](docs/contributing/development.md) - Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo-docs.md) -- Explore the [SigNoz Community Advocate Program](ADVOCATE.md), which recognises contributors who support the community, share their expertise, and help shape SigNoz's future. \ No newline at end of file +- Explore the [SigNoz Community Advocate Program](ADVOCATE.md), which recognises contributors who support the community, share their expertise, and help shape SigNoz's future. +- Write [integration tests](docs/contributing/go/integration.md) diff --git a/docs/contributing/go/integration.md b/docs/contributing/go/integration.md new file mode 100644 index 000000000000..6e1074e6126b --- /dev/null +++ b/docs/contributing/go/integration.md @@ -0,0 +1,213 @@ +# Integration Tests + +SigNoz uses integration tests to verify that different components work together correctly in a real environment. These tests run against actual services (ClickHouse, PostgreSQL, etc.) to ensure end-to-end functionality. + +## How to set up the integration test environment? + +### Prerequisites + +Before running integration tests, ensure you have the following installed: + +- Python 3.13+ +- Poetry (for dependency management) +- Docker (for containerized services) + +### Initial Setup + +1. Navigate to the integration tests directory: +```bash +cd tests/integration +``` + +2. Install dependencies using Poetry: +```bash +poetry install --no-root +``` + +### Starting the Test Environment + +To spin up all the containers necessary for writing integration tests and keep them running: + +```bash +poetry run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/setup.py::test_setup +``` + +This command will: +- Start all required services (ClickHouse, PostgreSQL, Zookeeper, etc.) 
+- Keep containers running due to the `--reuse` flag +- Verify that the setup is working correctly + +### Stopping the Test Environment + +When you're done writing integration tests, clean up the environment: + +```bash +poetry run pytest --basetemp=./tmp/ -vv --teardown -s src/bootstrap/setup.py::test_teardown +``` + +This will destroy the running integration test setup and clean up resources. + +## Understanding the Integration Test Framework + +Python and pytest form the foundation of the integration testing framework. Testcontainers are used to spin up disposable integration environments. Wiremock is used to spin up **test doubles** of other services. + +- **Why Python/pytest?** It's expressive, low-boilerplate, and has powerful fixture capabilities that make integration testing straightforward. Extensive libraries for HTTP requests, JSON handling, and data analysis (numpy) make it easier to test APIs and verify data +- **Why testcontainers?** They let us spin up isolated dependencies that match our production environment without complex setup. +- **Why wiremock?** Well maintained, documented and extensible. + +``` +. +├── conftest.py +├── fixtures +│ ├── __init__.py +│ ├── auth.py +│ ├── clickhouse.py +│ ├── fs.py +│ ├── http.py +│ ├── migrator.py +│ ├── network.py +│ ├── postgres.py +│ ├── signoz.py +│ ├── sql.py +│ ├── sqlite.py +│ ├── types.py +│ └── zookeeper.py +├── poetry.lock +├── pyproject.toml +└── src + └── bootstrap + ├── __init__.py + ├── a_database.py + ├── b_register.py + └── c_license.py +``` + +Each test suite follows some important principles: + +1. **Organization**: Test suites live under `src/` in self-contained packages. Fixtures (a pytest concept) live inside `fixtures/`. +2. **Execution Order**: Files are prefixed with `a_`, `b_`, `c_` to ensure sequential execution. +3. **Time Constraints**: Each suite should complete in under 10 minutes (setup takes ~4 mins). 
+ +### Test Suite Design + +Test suites should target functional domains or subsystems within SigNoz. When designing a test suite, consider these principles: + +- **Functional Cohesion**: Group tests around a specific capability or service boundary +- **Data Flow**: Follow the path of data through related components +- **Change Patterns**: Components frequently modified together should be tested together + +The exact boundaries for modules are intentionally flexible, allowing teams to define logical groupings based on their specific context and knowledge of the system. + +Eg: The **bootstrap** integration test suite validates core system functionality: + +- Database initialization +- Version check + +Other test suites can be **pipelines, auth, querier.** + +## How to write an integration test? + +Now start writing an integration test. Create a new file `src/bootstrap/e_version.py` and paste the following: + +```python +import requests + +from fixtures import types +from fixtures.logger import setup_logger + +logger = setup_logger(__name__) + +def test_version(signoz: types.SigNoz) -> None: + response = requests.get(signoz.self.host_config.get("/api/v1/version"), timeout=2) + logger.info(response) +``` + +We have written a simple test which calls the `version` endpoint of the container in step 1. In **order to just run this function, run the following command:** + +```bash +poetry run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/e_version.py::test_version +``` + +> Note: The `--reuse` flag is used to reuse the environment if it is already running. Always use this flag when writing and running integration tests. If you don't use this flag, the environment will be destroyed and recreated every time you run the test. 
+ +Here's another example of how to write a more comprehensive integration test: + +```python +from http import HTTPStatus +import requests +from fixtures import types +from fixtures.logger import setup_logger + +logger = setup_logger(__name__) + +def test_user_registration(signoz: types.SigNoz) -> None: + """Test user registration functionality.""" + response = requests.post( + signoz.self.host_configs["8080"].get("/api/v1/register"), + json={ + "name": "testuser", + "orgId": "", + "orgName": "test.org", + "email": "test@example.com", + "password": "password123Z$", + }, + timeout=2, + ) + + assert response.status_code == HTTPStatus.OK + assert response.json()["setupCompleted"] is True +``` + +## How to run integration tests? + +### Running All Tests + +```bash +poetry run pytest --basetemp=./tmp/ -vv --reuse src/ +``` + +### Running Specific Test Categories + +```bash +poetry run pytest --basetemp=./tmp/ -vv --reuse src/ + +# Run querier tests +poetry run pytest --basetemp=./tmp/ -vv --reuse src/querier/ +# Run auth tests +poetry run pytest --basetemp=./tmp/ -vv --reuse src/auth/ +``` + +### Running Individual Tests + +```bash +poetry run pytest --basetemp=./tmp/ -vv --reuse src//.py::test_name + +# Run test_register in file a_register.py in auth suite +poetry run pytest --basetemp=./tmp/ -vv --reuse src/auth/a_register.py::test_register +``` + +## How to configure different options for integration tests? + +Tests can be configured using pytest options: + +- `--sqlstore-provider` - Choose database provider (default: postgres) +- `--postgres-version` - PostgreSQL version (default: 15) +- `--clickhouse-version` - ClickHouse version (default: 24.1.2-alpine) +- `--zookeeper-version` - Zookeeper version (default: 3.7.1) + +Example: +```bash +poetry run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --postgres-version=14 src/auth/ +``` + + +## What should I remember? 
+ +- **Always use the `--reuse` flag** when setting up the environment to keep containers running +- **Use the `--teardown` flag** when cleaning up to avoid resource leaks +- **Follow the naming convention** with alphabetical prefixes for test execution order +- **Use proper timeouts** in HTTP requests to avoid hanging tests +- **Clean up test data** between tests to avoid interference +- **Use descriptive test names** that clearly indicate what is being tested +- **Leverage fixtures** for common setup and authentication +- **Test both success and failure scenarios** to ensure robust functionality From ae58915020efda32d0848eb64a0b6ac6de1a7827 Mon Sep 17 00:00:00 2001 From: Nityananda Gohain Date: Sun, 14 Sep 2025 18:18:39 +0530 Subject: [PATCH 19/51] chore: support for json column in resources (#8376) --- go.mod | 2 +- go.sum | 4 +- pkg/telemetrylogs/condition_builder.go | 7 + pkg/telemetrylogs/field_mapper.go | 24 ++- pkg/telemetrylogs/field_mapper_test.go | 18 ++- pkg/telemetrylogs/filter_expr_logs_test.go | 138 +++++++++--------- pkg/telemetrylogs/stmt_builder_test.go | 12 +- pkg/telemetrytraces/condition_builder.go | 7 + pkg/telemetrytraces/field_mapper.go | 23 ++- pkg/telemetrytraces/field_mapper_test.go | 14 +- pkg/telemetrytraces/stmt_builder_test.go | 24 +-- .../trace_operator_cte_builder_test.go | 10 +- 12 files changed, 177 insertions(+), 106 deletions(-) diff --git a/go.mod b/go.mod index 4fa125c5079c..042661cbbf63 100644 --- a/go.mod +++ b/go.mod @@ -8,7 +8,7 @@ require ( github.com/ClickHouse/clickhouse-go/v2 v2.36.0 github.com/DATA-DOG/go-sqlmock v1.5.2 github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd - github.com/SigNoz/signoz-otel-collector v0.128.1 + github.com/SigNoz/signoz-otel-collector v0.129.4 github.com/antlr4-go/antlr/v4 v4.13.1 github.com/antonmedv/expr v1.15.3 github.com/cespare/xxhash/v2 v2.3.0 diff --git a/go.sum b/go.sum index 643e5a4a3e09..812cb8ce2cf7 100644 --- a/go.sum +++ b/go.sum @@ -104,8 +104,8 @@ 
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8= github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc= -github.com/SigNoz/signoz-otel-collector v0.128.1 h1:D0bKMrRNgcKreKKYoakCr5jTWj1srupbNwGIvpHMihw= -github.com/SigNoz/signoz-otel-collector v0.128.1/go.mod h1:vFQLsJFzQwVkO1ltIMH+z9KKuTZTn/P0lKu2mNYDBpE= +github.com/SigNoz/signoz-otel-collector v0.129.4 h1:DGDu9y1I1FU+HX4eECPGmfhnXE4ys4yr7LL6znbf6to= +github.com/SigNoz/signoz-otel-collector v0.129.4/go.mod h1:xyR+coBzzO04p6Eu+ql2RVYUl/jFD+8hD9lArcc9U7g= github.com/Yiling-J/theine-go v0.6.1 h1:njE/rBBviU/Sq2G7PJKdLdwXg8j1azvZQulIjmshD+o= github.com/Yiling-J/theine-go v0.6.1/go.mod h1:08QpMa5JZ2pKN+UJCRrCasWYO1IKCdl54Xa836rpmDU= github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= diff --git a/pkg/telemetrylogs/condition_builder.go b/pkg/telemetrylogs/condition_builder.go index 5c41938d5608..23dffd57bd57 100644 --- a/pkg/telemetrylogs/condition_builder.go +++ b/pkg/telemetrylogs/condition_builder.go @@ -165,6 +165,13 @@ func (c *conditionBuilder) conditionFor( var value any switch column.Type { + case schema.JSONColumnType{}: + value = "NULL" + if operator == qbtypes.FilterOperatorExists { + return sb.NE(tblFieldName, value), nil + } else { + return sb.E(tblFieldName, value), nil + } case schema.ColumnTypeString, schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}: value = "" if operator == qbtypes.FilterOperatorExists { diff --git a/pkg/telemetrylogs/field_mapper.go b/pkg/telemetrylogs/field_mapper.go index cddf7b66d687..35d74f47ef8b 100644 --- a/pkg/telemetrylogs/field_mapper.go +++ b/pkg/telemetrylogs/field_mapper.go @@ -44,6 +44,7 @@ var ( 
KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, ValueType: schema.ColumnTypeString, }}, + "resource": {Name: "resource", Type: schema.JSONColumnType{}}, "scope_name": {Name: "scope_name", Type: schema.ColumnTypeString}, "scope_version": {Name: "scope_version", Type: schema.ColumnTypeString}, "scope_string": {Name: "scope_string", Type: schema.MapColumnType{ @@ -53,7 +54,8 @@ var ( } ) -type fieldMapper struct{} +type fieldMapper struct { +} func NewFieldMapper() qbtypes.FieldMapper { return &fieldMapper{} @@ -62,7 +64,7 @@ func NewFieldMapper() qbtypes.FieldMapper { func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) { switch key.FieldContext { case telemetrytypes.FieldContextResource: - return logsV2Columns["resources_string"], nil + return logsV2Columns["resource"], nil case telemetrytypes.FieldContextScope: switch key.Name { case "name", "scope.name", "scope_name": @@ -102,6 +104,24 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr } switch column.Type { + case schema.JSONColumnType{}: + // json is only supported for resource context as of now + if key.FieldContext != telemetrytypes.FieldContextResource { + return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String) + } + oldColumn := logsV2Columns["resources_string"] + oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name) + + // have to add ::string as clickHouse throws an error :- data types Variant/Dynamic are not allowed in GROUP BY + // once clickHouse dependency is updated, we need to check if we can remove it. 
+ if key.Materialized { + oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key) + oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key) + return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil + } else { + return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil + } + case schema.ColumnTypeString, schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, schema.ColumnTypeUInt64, diff --git a/pkg/telemetrylogs/field_mapper_test.go b/pkg/telemetrylogs/field_mapper_test.go index f60a50a9ebd7..23b1ff2be8c9 100644 --- a/pkg/telemetrylogs/field_mapper_test.go +++ b/pkg/telemetrylogs/field_mapper_test.go @@ -26,7 +26,7 @@ func TestGetColumn(t *testing.T) { Name: "service.name", FieldContext: telemetrytypes.FieldContextResource, }, - expectedCol: logsV2Columns["resources_string"], + expectedCol: logsV2Columns["resource"], expectedError: nil, }, { @@ -234,7 +234,18 @@ func TestGetFieldKeyName(t *testing.T) { Name: "service.name", FieldContext: telemetrytypes.FieldContextResource, }, - expectedResult: "resources_string['service.name']", + expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)", + expectedError: nil, + }, + { + name: "Map column type - resource attribute - Materialized", + key: telemetrytypes.TelemetryFieldKey{ + Name: "service.name", + FieldContext: telemetrytypes.FieldContextResource, + FieldDataType: telemetrytypes.FieldDataTypeString, + Materialized: true, + }, + expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, 
NULL)", expectedError: nil, }, { @@ -248,10 +259,9 @@ func TestGetFieldKeyName(t *testing.T) { }, } - fm := NewFieldMapper() - for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { + fm := NewFieldMapper() result, err := fm.FieldFor(ctx, &tc.key) if tc.expectedError != nil { diff --git a/pkg/telemetrylogs/filter_expr_logs_test.go b/pkg/telemetrylogs/filter_expr_logs_test.go index 2f635a9088ac..2a46a96066f2 100644 --- a/pkg/telemetrylogs/filter_expr_logs_test.go +++ b/pkg/telemetrylogs/filter_expr_logs_test.go @@ -420,8 +420,8 @@ func TestFilterExprLogs(t *testing.T) { category: "FREETEXT with conditions", query: "error service.name=authentication", shouldPass: true, - expectedQuery: "WHERE (match(LOWER(body), LOWER(?)) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", - expectedArgs: []any{"error", "authentication", true}, + expectedQuery: "WHERE (match(LOWER(body), LOWER(?)) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))", + expectedArgs: []any{"error", "authentication", "NULL"}, expectedErrorContains: "", }, @@ -778,8 +778,8 @@ func TestFilterExprLogs(t *testing.T) { category: "Basic equality", query: "service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)", - expectedArgs: []any{"api", true}, + expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? 
AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)", + expectedArgs: []any{"api", "NULL"}, expectedErrorContains: "", }, { @@ -844,7 +844,7 @@ func TestFilterExprLogs(t *testing.T) { category: "Not equals", query: "service.name!=\"api\"", shouldPass: true, - expectedQuery: "WHERE resources_string['service.name'] <> ?", + expectedQuery: "WHERE multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?", expectedArgs: []any{"api"}, expectedErrorContains: "", }, @@ -1138,16 +1138,16 @@ func TestFilterExprLogs(t *testing.T) { category: "IN operator (parentheses)", query: "service.name IN (\"api\", \"web\", \"auth\")", shouldPass: true, - expectedQuery: "WHERE ((resources_string['service.name'] = ? OR resources_string['service.name'] = ? OR resources_string['service.name'] = ?) AND mapContains(resources_string, 'service.name') = ?)", - expectedArgs: []any{"api", "web", "auth", true}, + expectedQuery: "WHERE ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? OR multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? OR multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ?) 
AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)", + expectedArgs: []any{"api", "web", "auth", "NULL"}, expectedErrorContains: "", }, { category: "IN operator (parentheses)", query: "environment IN (\"dev\", \"test\", \"staging\", \"prod\")", shouldPass: true, - expectedQuery: "WHERE ((resources_string['environment'] = ? OR resources_string['environment'] = ? OR resources_string['environment'] = ? OR resources_string['environment'] = ?) AND mapContains(resources_string, 'environment') = ?)", - expectedArgs: []any{"dev", "test", "staging", "prod", true}, + expectedQuery: "WHERE ((multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) = ? OR multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) = ? OR multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) = ? OR multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) = ?) AND multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) <> ?)", + expectedArgs: []any{"dev", "test", "staging", "prod", "NULL"}, expectedErrorContains: "", }, @@ -1172,16 +1172,16 @@ func TestFilterExprLogs(t *testing.T) { category: "IN operator (brackets)", query: "service.name IN [\"api\", \"web\", \"auth\"]", shouldPass: true, - expectedQuery: "WHERE ((resources_string['service.name'] = ? OR resources_string['service.name'] = ? OR resources_string['service.name'] = ?) 
AND mapContains(resources_string, 'service.name') = ?)", - expectedArgs: []any{"api", "web", "auth", true}, + expectedQuery: "WHERE ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? OR multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? OR multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ?) AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)", + expectedArgs: []any{"api", "web", "auth", "NULL"}, expectedErrorContains: "", }, { category: "IN operator (brackets)", query: "environment IN [\"dev\", \"test\", \"staging\", \"prod\"]", shouldPass: true, - expectedQuery: "WHERE ((resources_string['environment'] = ? OR resources_string['environment'] = ? OR resources_string['environment'] = ? OR resources_string['environment'] = ?) AND mapContains(resources_string, 'environment') = ?)", - expectedArgs: []any{"dev", "test", "staging", "prod", true}, + expectedQuery: "WHERE ((multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) = ? OR multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) = ? OR multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) = ? 
OR multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) = ?) AND multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) <> ?)", + expectedArgs: []any{"dev", "test", "staging", "prod", "NULL"}, expectedErrorContains: "", }, @@ -1206,7 +1206,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT IN operator (parentheses)", query: "service.name NOT IN (\"database\", \"cache\")", shouldPass: true, - expectedQuery: "WHERE (resources_string['service.name'] <> ? AND resources_string['service.name'] <> ?)", + expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)", expectedArgs: []any{"database", "cache"}, expectedErrorContains: "", }, @@ -1214,7 +1214,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT IN operator (parentheses)", query: "environment NOT IN (\"prod\")", shouldPass: true, - expectedQuery: "WHERE (resources_string['environment'] <> ?)", + expectedQuery: "WHERE (multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) <> ?)", expectedArgs: []any{"prod"}, expectedErrorContains: "", }, @@ -1240,7 +1240,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT IN operator (brackets)", query: "service.name NOT IN [\"database\", \"cache\"]", shouldPass: true, - expectedQuery: "WHERE (resources_string['service.name'] <> ? 
AND resources_string['service.name'] <> ?)", + expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)", expectedArgs: []any{"database", "cache"}, expectedErrorContains: "", }, @@ -1248,7 +1248,7 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT IN operator (brackets)", query: "environment NOT IN [\"prod\"]", shouldPass: true, - expectedQuery: "WHERE (resources_string['environment'] <> ?)", + expectedQuery: "WHERE (multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) <> ?)", expectedArgs: []any{"prod"}, expectedErrorContains: "", }, @@ -1498,8 +1498,8 @@ func TestFilterExprLogs(t *testing.T) { category: "Explicit AND", query: "status=200 AND service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", - expectedArgs: []any{float64(200), true, "api", true}, + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? 
AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))", + expectedArgs: []any{float64(200), true, "api", "NULL"}, expectedErrorContains: "", }, { @@ -1532,8 +1532,8 @@ func TestFilterExprLogs(t *testing.T) { category: "Explicit OR", query: "service.name=\"api\" OR service.name=\"web\"", shouldPass: true, - expectedQuery: "WHERE ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", - expectedArgs: []any{"api", true, "web", true}, + expectedQuery: "WHERE ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) OR (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))", + expectedArgs: []any{"api", "NULL", "web", "NULL"}, expectedErrorContains: "", }, { @@ -1558,8 +1558,8 @@ func TestFilterExprLogs(t *testing.T) { category: "NOT with expressions", query: "NOT service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE NOT ((resources_string['service.name'] = ? 
AND mapContains(resources_string, 'service.name') = ?))", - expectedArgs: []any{"api", true}, + expectedQuery: "WHERE NOT ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))", + expectedArgs: []any{"api", "NULL"}, expectedErrorContains: "", }, { @@ -1576,8 +1576,8 @@ func TestFilterExprLogs(t *testing.T) { category: "AND + OR combinations", query: "status=200 AND (service.name=\"api\" OR service.name=\"web\")", shouldPass: true, - expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))))", - expectedArgs: []any{float64(200), true, "api", true, "web", true}, + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) OR (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? 
AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))))", + expectedArgs: []any{float64(200), true, "api", "NULL", "web", "NULL"}, expectedErrorContains: "", }, { @@ -1602,8 +1602,8 @@ func TestFilterExprLogs(t *testing.T) { category: "AND + NOT combinations", query: "status=200 AND NOT service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND NOT ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))", - expectedArgs: []any{float64(200), true, "api", true}, + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND NOT ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)))", + expectedArgs: []any{float64(200), true, "api", "NULL"}, expectedErrorContains: "", }, { @@ -1620,8 +1620,8 @@ func TestFilterExprLogs(t *testing.T) { category: "OR + NOT combinations", query: "NOT status=200 OR NOT service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE (NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)) OR NOT ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))", - expectedArgs: []any{float64(200), true, "api", true}, + expectedQuery: "WHERE (NOT ((toFloat64(attributes_number['status']) = ? 
AND mapContains(attributes_number, 'status') = ?)) OR NOT ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)))", + expectedArgs: []any{float64(200), true, "api", "NULL"}, expectedErrorContains: "", }, { @@ -1638,8 +1638,8 @@ func TestFilterExprLogs(t *testing.T) { category: "AND + OR + NOT combinations", query: "status=200 AND (service.name=\"api\" OR NOT duration>1000)", shouldPass: true, - expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR NOT ((toFloat64(attributes_number['duration']) > ? AND mapContains(attributes_number, 'duration') = ?)))))", - expectedArgs: []any{float64(200), true, "api", true, float64(1000), true}, + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) OR NOT ((toFloat64(attributes_number['duration']) > ? 
AND mapContains(attributes_number, 'duration') = ?)))))", + expectedArgs: []any{float64(200), true, "api", "NULL", float64(1000), true}, expectedErrorContains: "", }, { @@ -1654,8 +1654,8 @@ func TestFilterExprLogs(t *testing.T) { category: "AND + OR + NOT combinations", query: "NOT (status=200 AND service.name=\"api\") OR count>0", shouldPass: true, - expectedQuery: "WHERE (NOT ((((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))) OR (toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?))", - expectedArgs: []any{float64(200), true, "api", true, float64(0), true}, + expectedQuery: "WHERE (NOT ((((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)))) OR (toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?))", + expectedArgs: []any{float64(200), true, "api", "NULL", float64(0), true}, expectedErrorContains: "", }, @@ -1664,8 +1664,8 @@ func TestFilterExprLogs(t *testing.T) { category: "Implicit AND", query: "status=200 service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", - expectedArgs: []any{float64(200), true, "api", true}, + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) 
AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))", + expectedArgs: []any{float64(200), true, "api", "NULL"}, expectedErrorContains: "", }, { @@ -1690,8 +1690,8 @@ func TestFilterExprLogs(t *testing.T) { category: "Mixed implicit/explicit AND", query: "status=200 AND service.name=\"api\" duration<1000", shouldPass: true, - expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?))", - expectedArgs: []any{float64(200), true, "api", true, float64(1000), true}, + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) AND (toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?))", + expectedArgs: []any{float64(200), true, "api", "NULL", float64(1000), true}, expectedErrorContains: "", }, { @@ -1716,8 +1716,8 @@ func TestFilterExprLogs(t *testing.T) { category: "Simple grouping", query: "service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE (resources_string['service.name'] = ? 
AND mapContains(resources_string, 'service.name') = ?)", - expectedArgs: []any{"api", true}, + expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)", + expectedArgs: []any{"api", "NULL"}, expectedErrorContains: "", }, { @@ -1742,8 +1742,8 @@ func TestFilterExprLogs(t *testing.T) { category: "Nested grouping", query: "(((service.name=\"api\")))", shouldPass: true, - expectedQuery: "WHERE ((((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))))", - expectedArgs: []any{"api", true}, + expectedQuery: "WHERE ((((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))))", + expectedArgs: []any{"api", "NULL"}, expectedErrorContains: "", }, { @@ -1760,8 +1760,8 @@ func TestFilterExprLogs(t *testing.T) { category: "Complex nested grouping", query: "(status=200 AND (service.name=\"api\" OR service.name=\"web\"))", shouldPass: true, - expectedQuery: "WHERE (((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))))", - expectedArgs: []any{float64(200), true, "api", true, "web", true}, + expectedQuery: "WHERE (((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) 
AND (((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) OR (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)))))", + expectedArgs: []any{float64(200), true, "api", "NULL", "web", "NULL"}, expectedErrorContains: "", }, { @@ -1786,16 +1786,16 @@ func TestFilterExprLogs(t *testing.T) { category: "Deep nesting", query: "(((status=200 OR status=201) AND service.name=\"api\") OR ((status=202 OR status=203) AND service.name=\"web\"))", shouldPass: true, - expectedQuery: "WHERE (((((((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) OR (toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?))) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))) OR (((((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) OR (toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?))) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))))", - expectedArgs: []any{float64(200), true, float64(201), true, "api", true, float64(202), true, float64(203), true, "web", true}, + expectedQuery: "WHERE (((((((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) OR (toFloat64(attributes_number['status']) = ? 
AND mapContains(attributes_number, 'status') = ?))) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))) OR (((((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) OR (toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?))) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)))))", + expectedArgs: []any{float64(200), true, float64(201), true, "api", "NULL", float64(202), true, float64(203), true, "web", "NULL"}, expectedErrorContains: "", }, { category: "Deep nesting", query: "(count>0 AND ((duration<1000 AND service.name=\"api\") OR (duration<500 AND service.name=\"web\")))", shouldPass: true, - expectedQuery: "WHERE (((toFloat64(attributes_number['count']) > ? AND mapContains(attributes_number, 'count') = ?) AND (((((toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))) OR (((toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))))))", - expectedArgs: []any{float64(0), true, float64(1000), true, "api", true, float64(500), true, "web", true}, + expectedQuery: "WHERE (((toFloat64(attributes_number['count']) > ? 
AND mapContains(attributes_number, 'count') = ?) AND (((((toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))) OR (((toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)))))))", + expectedArgs: []any{float64(0), true, float64(1000), true, "api", "NULL", float64(500), true, "web", "NULL"}, expectedErrorContains: "", }, @@ -1804,16 +1804,16 @@ func TestFilterExprLogs(t *testing.T) { category: "String quote styles", query: "service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)", - expectedArgs: []any{"api", true}, + expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)", + expectedArgs: []any{"api", "NULL"}, expectedErrorContains: "", }, { category: "String quote styles", query: "service.name='api'", shouldPass: true, - expectedQuery: "WHERE (resources_string['service.name'] = ? 
AND mapContains(resources_string, 'service.name') = ?)", - expectedArgs: []any{"api", true}, + expectedQuery: "WHERE (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)", + expectedArgs: []any{"api", "NULL"}, expectedErrorContains: "", }, { @@ -1972,29 +1972,29 @@ func TestFilterExprLogs(t *testing.T) { category: "Operator precedence", query: "NOT status=200 AND service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE (NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", - expectedArgs: []any{float64(200), true, "api", true}, // Should be (NOT status=200) AND service.name="api" + expectedQuery: "WHERE (NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))", + expectedArgs: []any{float64(200), true, "api", "NULL"}, // Should be (NOT status=200) AND service.name="api" }, { category: "Operator precedence", query: "status=200 AND service.name=\"api\" OR service.name=\"web\"", shouldPass: true, - expectedQuery: "WHERE (((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? 
AND mapContains(resources_string, 'service.name') = ?)) OR (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", - expectedArgs: []any{float64(200), true, "api", true, "web", true}, // Should be (status=200 AND service.name="api") OR service.name="web" + expectedQuery: "WHERE (((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)) OR (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))", + expectedArgs: []any{float64(200), true, "api", "NULL", "web", "NULL"}, // Should be (status=200 AND service.name="api") OR service.name="web" }, { category: "Operator precedence", query: "NOT status=200 OR NOT service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE (NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)) OR NOT ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?)))", - expectedArgs: []any{float64(200), true, "api", true}, // Should be (NOT status=200) OR (NOT service.name="api") + expectedQuery: "WHERE (NOT ((toFloat64(attributes_number['status']) = ? 
AND mapContains(attributes_number, 'status') = ?)) OR NOT ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?)))", + expectedArgs: []any{float64(200), true, "api", "NULL"}, // Should be (NOT status=200) OR (NOT service.name="api") }, { category: "Operator precedence", query: "status=200 OR service.name=\"api\" AND level=\"ERROR\"", shouldPass: true, - expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) OR ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (attributes_string['level'] = ? AND mapContains(attributes_string, 'level') = ?)))", - expectedArgs: []any{float64(200), true, "api", true, "ERROR", true}, // Should be status=200 OR (service.name="api" AND level="ERROR") + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) OR ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) AND (attributes_string['level'] = ? 
AND mapContains(attributes_string, 'level') = ?)))", + expectedArgs: []any{float64(200), true, "api", "NULL", "ERROR", true}, // Should be status=200 OR (service.name="api" AND level="ERROR") }, // Different whitespace patterns @@ -2018,8 +2018,8 @@ func TestFilterExprLogs(t *testing.T) { category: "Whitespace patterns", query: "status=200 AND service.name=\"api\"", shouldPass: true, - expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", - expectedArgs: []any{float64(200), true, "api", true}, // Multiple spaces + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))", + expectedArgs: []any{float64(200), true, "api", "NULL"}, // Multiple spaces }, // More Unicode characters @@ -2188,8 +2188,8 @@ func TestFilterExprLogs(t *testing.T) { category: "More common filters", query: "service.name=\"api\" AND (status>=500 OR duration>1000) AND NOT message CONTAINS \"expected\"", shouldPass: true, - expectedQuery: "WHERE ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) AND (((toFloat64(attributes_number['status']) >= ? AND mapContains(attributes_number, 'status') = ?) OR (toFloat64(attributes_number['duration']) > ? AND mapContains(attributes_number, 'duration') = ?))) AND NOT ((LOWER(attributes_string['message']) LIKE LOWER(?) 
AND mapContains(attributes_string, 'message') = ?)))", - expectedArgs: []any{"api", true, float64(500), true, float64(1000), true, "%expected%", true}, + expectedQuery: "WHERE ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) AND (((toFloat64(attributes_number['status']) >= ? AND mapContains(attributes_number, 'status') = ?) OR (toFloat64(attributes_number['duration']) > ? AND mapContains(attributes_number, 'duration') = ?))) AND NOT ((LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?)))", + expectedArgs: []any{"api", "NULL", float64(500), true, float64(1000), true, "%expected%", true}, }, // Edge cases @@ -2254,8 +2254,8 @@ func TestFilterExprLogs(t *testing.T) { category: "Unusual whitespace", query: "status = 200 AND service.name = \"api\"", shouldPass: true, - expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?))", - expectedArgs: []any{float64(200), true, "api", true}, + expectedQuery: "WHERE ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?) AND (multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? 
AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?))", + expectedArgs: []any{float64(200), true, "api", "NULL"}, }, { category: "Unusual whitespace", @@ -2315,13 +2315,13 @@ func TestFilterExprLogs(t *testing.T) { ) `, shouldPass: true, - expectedQuery: "WHERE ((((((((toFloat64(attributes_number['status']) >= ? AND mapContains(attributes_number, 'status') = ?) AND (toFloat64(attributes_number['status']) < ? AND mapContains(attributes_number, 'status') = ?))) OR (((toFloat64(attributes_number['status']) >= ? AND mapContains(attributes_number, 'status') = ?) AND (toFloat64(attributes_number['status']) < ? AND mapContains(attributes_number, 'status') = ?) AND NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)))))) AND ((((resources_string['service.name'] = ? OR resources_string['service.name'] = ? OR resources_string['service.name'] = ?) AND mapContains(resources_string, 'service.name') = ?) OR (((resources_string['service.type'] = ? AND mapContains(resources_string, 'service.type') = ?) AND NOT ((resources_string['service.deprecated'] = ? AND mapContains(resources_string, 'service.deprecated') = ?)))))))) AND (((((toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?) OR ((toFloat64(attributes_number['duration']) BETWEEN ? AND ? AND mapContains(attributes_number, 'duration') = ?)))) AND ((resources_string['environment'] <> ? OR (((resources_string['environment'] = ? AND mapContains(resources_string, 'environment') = ?) AND (attributes_bool['is_automated_test'] = ? AND mapContains(attributes_bool, 'is_automated_test') = ?))))))) AND NOT ((((((LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?) OR (LOWER(attributes_string['message']) LIKE LOWER(?) 
AND mapContains(attributes_string, 'message') = ?))) AND (attributes_string['severity'] = ? AND mapContains(attributes_string, 'severity') = ?)))))", + expectedQuery: "WHERE ((((((((toFloat64(attributes_number['status']) >= ? AND mapContains(attributes_number, 'status') = ?) AND (toFloat64(attributes_number['status']) < ? AND mapContains(attributes_number, 'status') = ?))) OR (((toFloat64(attributes_number['status']) >= ? AND mapContains(attributes_number, 'status') = ?) AND (toFloat64(attributes_number['status']) < ? AND mapContains(attributes_number, 'status') = ?) AND NOT ((toFloat64(attributes_number['status']) = ? AND mapContains(attributes_number, 'status') = ?)))))) AND ((((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? OR multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? OR multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ?) AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) OR (((multiIf(resource.`service.type` IS NOT NULL, resource.`service.type`::String, mapContains(resources_string, 'service.type'), resources_string['service.type'], NULL) = ? AND multiIf(resource.`service.type` IS NOT NULL, resource.`service.type`::String, mapContains(resources_string, 'service.type'), resources_string['service.type'], NULL) <> ?) AND NOT ((multiIf(resource.`service.deprecated` IS NOT NULL, resource.`service.deprecated`::String, mapContains(resources_string, 'service.deprecated'), resources_string['service.deprecated'], NULL) = ? 
AND multiIf(resource.`service.deprecated` IS NOT NULL, resource.`service.deprecated`::String, mapContains(resources_string, 'service.deprecated'), resources_string['service.deprecated'], NULL) <> ?)))))))) AND (((((toFloat64(attributes_number['duration']) < ? AND mapContains(attributes_number, 'duration') = ?) OR ((toFloat64(attributes_number['duration']) BETWEEN ? AND ? AND mapContains(attributes_number, 'duration') = ?)))) AND ((multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) <> ? OR (((multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) = ? AND multiIf(resource.`environment` IS NOT NULL, resource.`environment`::String, mapContains(resources_string, 'environment'), resources_string['environment'], NULL) <> ?) AND (attributes_bool['is_automated_test'] = ? AND mapContains(attributes_bool, 'is_automated_test') = ?))))))) AND NOT ((((((LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?) OR (LOWER(attributes_string['message']) LIKE LOWER(?) AND mapContains(attributes_string, 'message') = ?))) AND (attributes_string['severity'] = ? 
AND mapContains(attributes_string, 'severity') = ?)))))", expectedArgs: []any{ float64(200), true, float64(300), true, float64(400), true, float64(500), true, float64(404), true, - "api", "web", "auth", true, - "internal", true, true, true, + "api", "web", "auth", "NULL", + "internal", "NULL", true, "NULL", float64(1000), true, float64(1000), float64(5000), true, - "test", "test", true, true, true, + "test", "test", "NULL", true, true, "%warning%", true, "%deprecated%", true, "low", true, }, diff --git a/pkg/telemetrylogs/stmt_builder_test.go b/pkg/telemetrylogs/stmt_builder_test.go index ed23fafc183c..1610a32a10cf 100644 --- a/pkg/telemetrylogs/stmt_builder_test.go +++ b/pkg/telemetrylogs/stmt_builder_test.go @@ -69,8 +69,8 @@ func TestStatementBuilderTimeSeries(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? 
AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", - Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)}, + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) 
SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "NULL", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "NULL", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)}, }, expectedErr: nil, }, @@ -98,8 +98,8 @@ func TestStatementBuilderTimeSeries(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (attributes_string['http.method'] = ? 
AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", - Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), true, "redis-manual", true, "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, true, "redis-manual", true, "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)}, + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? 
AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? 
AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) OR (attributes_string['http.method'] = ? AND mapContains(attributes_string, 'http.method') = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "NULL", "redis-manual", "NULL", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "NULL", "redis-manual", "NULL", "GET", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)}, }, expectedErr: nil, }, @@ -137,8 +137,8 @@ func TestStatementBuilderTimeSeries(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) 
SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc", - Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)}, + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) 
SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc", + Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "NULL", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10, "NULL", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)}, }, expectedErr: nil, }, diff --git a/pkg/telemetrytraces/condition_builder.go b/pkg/telemetrytraces/condition_builder.go index d7e0e7027ac6..84bc3bee4d2d 100644 --- a/pkg/telemetrytraces/condition_builder.go +++ b/pkg/telemetrytraces/condition_builder.go @@ -163,6 +163,13 @@ func (c *conditionBuilder) conditionFor( var value any switch column.Type { + case schema.JSONColumnType{}: + value = "NULL" + if operator == qbtypes.FilterOperatorExists { + return sb.NE(tblFieldName, value), nil + } else { + return sb.E(tblFieldName, value), nil + } case schema.ColumnTypeString, schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, schema.FixedStringColumnType{Length: 32}, diff --git a/pkg/telemetrytraces/field_mapper.go b/pkg/telemetrytraces/field_mapper.go index d30d25d83752..a75fb417b03e 100644 --- 
a/pkg/telemetrytraces/field_mapper.go +++ b/pkg/telemetrytraces/field_mapper.go @@ -50,6 +50,7 @@ var ( KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, ValueType: schema.ColumnTypeString, }}, + "resource": {Name: "resource", Type: schema.JSONColumnType{}}, "events": {Name: "events", Type: schema.ArrayColumnType{ ElementType: schema.ColumnTypeString, @@ -157,7 +158,8 @@ var ( } ) -type defaultFieldMapper struct{} +type defaultFieldMapper struct { +} var _ qbtypes.FieldMapper = (*defaultFieldMapper)(nil) @@ -171,7 +173,7 @@ func (m *defaultFieldMapper) getColumn( ) (*schema.Column, error) { switch key.FieldContext { case telemetrytypes.FieldContextResource: - return indexV3Columns["resources_string"], nil + return indexV3Columns["resource"], nil case telemetrytypes.FieldContextScope: return nil, qbtypes.ErrColumnNotFound case telemetrytypes.FieldContextAttribute: @@ -235,6 +237,23 @@ func (m *defaultFieldMapper) FieldFor( } switch column.Type { + case schema.JSONColumnType{}: + // json is only supported for resource context as of now + if key.FieldContext != telemetrytypes.FieldContextResource { + return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns, got %s", key.FieldContext.String) + } + oldColumn := indexV3Columns["resources_string"] + oldKeyName := fmt.Sprintf("%s['%s']", oldColumn.Name, key.Name) + // have to add ::string as clickHouse throws an error :- data types Variant/Dynamic are not allowed in GROUP BY + // once clickHouse dependency is updated, we need to check if we can remove it. 
+ if key.Materialized { + oldKeyName = telemetrytypes.FieldKeyToMaterializedColumnName(key) + oldKeyNameExists := telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key) + return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, %s==true, %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldKeyNameExists, oldKeyName), nil + } else { + return fmt.Sprintf("multiIf(%s.`%s` IS NOT NULL, %s.`%s`::String, mapContains(%s, '%s'), %s, NULL)", column.Name, key.Name, column.Name, key.Name, oldColumn.Name, key.Name, oldKeyName), nil + } + case schema.ColumnTypeString, schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, schema.ColumnTypeUInt64, diff --git a/pkg/telemetrytraces/field_mapper_test.go b/pkg/telemetrytraces/field_mapper_test.go index 472daadada49..93e473e1cbe8 100644 --- a/pkg/telemetrytraces/field_mapper_test.go +++ b/pkg/telemetrytraces/field_mapper_test.go @@ -64,7 +64,16 @@ func TestGetFieldKeyName(t *testing.T) { Name: "service.name", FieldContext: telemetrytypes.FieldContextResource, }, - expectedResult: "resources_string['service.name']", + expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)", + expectedError: nil, + }, + { + name: "Map column type - resource attribute - legacy", + key: telemetrytypes.TelemetryFieldKey{ + Name: "service.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + expectedResult: "multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL)", expectedError: nil, }, { @@ -78,10 +87,9 @@ func TestGetFieldKeyName(t *testing.T) { }, } - fm := NewFieldMapper() - for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { + fm := NewFieldMapper() result, err := fm.FieldFor(ctx, &tc.key) if tc.expectedError != nil { diff --git 
a/pkg/telemetrytraces/stmt_builder_test.go b/pkg/telemetrytraces/stmt_builder_test.go index ea1a8e43eab9..ba089266f34b 100644 --- a/pkg/telemetrytraces/stmt_builder_test.go +++ b/pkg/telemetrytraces/stmt_builder_test.go @@ -60,8 +60,8 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", - Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) 
SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "NULL", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, "NULL", "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil, }, @@ -89,8 +89,8 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (attributes_string['http.request.method'] = ? 
AND mapContains(attributes_string, 'http.request.method') = ?)) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((resources_string['service.name'] = ? AND mapContains(resources_string, 'service.name') = ?) OR (attributes_string['http.request.method'] = ? AND mapContains(attributes_string, 'http.request.method') = ?)) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", - Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), true, "redis-manual", true, "GET", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "redis-manual", true, "GET", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE ((simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) OR true) AND seen_at_ts_bucket_start >= ? 
AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) OR (attributes_string['http.request.method'] = ? AND mapContains(attributes_string, 'http.request.method') = ?)) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) = ? 
AND multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?) OR (attributes_string['http.request.method'] = ? AND mapContains(attributes_string, 'http.request.method') = ?)) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "NULL", "redis-manual", "NULL", "GET", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, "NULL", "redis-manual", "NULL", "GET", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil, }, @@ -187,8 +187,8 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, sum(multiIf(mapContains(attributes_number, 'metric.max_count') = ?, toFloat64(attributes_number['metric.max_count']), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) 
SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, sum(multiIf(mapContains(attributes_number, 'metric.max_count') = ?, toFloat64(attributes_number['metric.max_count']), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", - Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), true, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, sum(multiIf(mapContains(attributes_number, 'metric.max_count') = ?, toFloat64(attributes_number['metric.max_count']), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? 
AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, sum(multiIf(mapContains(attributes_number, 'metric.max_count') = ?, toFloat64(attributes_number['metric.max_count']), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "NULL", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, "NULL", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil, }, @@ -216,8 +216,8 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? 
AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, sum(multiIf(`attribute_number_cart$$items_count_exists` = ?, toFloat64(`attribute_number_cart$$items_count`), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, sum(multiIf(`attribute_number_cart$$items_count_exists` = ?, toFloat64(`attribute_number_cart$$items_count`), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", - Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), true, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? 
AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, sum(multiIf(`attribute_number_cart$$items_count_exists` = ?, toFloat64(`attribute_number_cart$$items_count`), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, sum(multiIf(`attribute_number_cart$$items_count_exists` = ?, toFloat64(`attribute_number_cart$$items_count`), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`", + Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "NULL", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, "NULL", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil, }, @@ -255,8 +255,8 @@ func TestStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, sum(multiIf(`attribute_number_cart$$items_count_exists` = ?, toFloat64(`attribute_number_cart$$items_count`), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, sum(multiIf(`attribute_number_cart$$items_count_exists` = ?, toFloat64(`attribute_number_cart$$items_count`), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc", - Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), true, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, sum(multiIf(`attribute_number_cart$$items_count_exists` = ?, toFloat64(`attribute_number_cart$$items_count`), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) 
SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, sum(multiIf(`attribute_number_cart$$items_count_exists` = ?, toFloat64(`attribute_number_cart$$items_count`), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc", + Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "NULL", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, "NULL", true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)}, }, expectedErr: nil, }, @@ -412,7 +412,7 @@ func TestStatementBuilderListQuery(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) 
SELECT name AS `name`, resources_string['service.name'] AS `service.name`, duration_nano AS `duration_nano`, `attribute_number_cart$$items_count` AS `cart.items_count`, timestamp AS `timestamp`, span_id AS `span_id`, trace_id AS `trace_id` FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? LIMIT ?", + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT name AS `name`, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) AS `service.name`, duration_nano AS `duration_nano`, `attribute_number_cart$$items_count` AS `cart.items_count`, timestamp AS `timestamp`, span_id AS `span_id`, trace_id AS `trace_id` FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? LIMIT ?", Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10}, }, expectedErr: nil, @@ -441,7 +441,7 @@ func TestStatementBuilderListQuery(t *testing.T) { Limit: 10, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) 
SELECT duration_nano AS `duration_nano`, name AS `name`, response_status_code AS `response_status_code`, `resource_string_service$$name` AS `service.name`, span_id AS `span_id`, timestamp AS `timestamp`, trace_id AS `trace_id` FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? ORDER BY attributes_string['user.id'] AS `user.id` desc LIMIT ?", + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT duration_nano AS `duration_nano`, name AS `name`, response_status_code AS `response_status_code`, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, `resource_string_service$$name_exists`==true, `resource_string_service$$name`, NULL) AS `service.name`, span_id AS `span_id`, timestamp AS `timestamp`, trace_id AS `trace_id` FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
ORDER BY attributes_string['user.id'] AS `user.id` desc LIMIT ?", Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10}, }, expectedErr: nil, diff --git a/pkg/telemetrytraces/trace_operator_cte_builder_test.go b/pkg/telemetrytraces/trace_operator_cte_builder_test.go index 420f2c10fe2c..fa8f9b40b4a9 100644 --- a/pkg/telemetrytraces/trace_operator_cte_builder_test.go +++ b/pkg/telemetrytraces/trace_operator_cte_builder_test.go @@ -66,7 +66,7 @@ func TestTraceOperatorStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? 
AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), A_DIR_DESC_B AS (SELECT p.* FROM A AS p INNER JOIN B AS c ON p.trace_id = c.trace_id AND p.span_id = c.parent_span_id) SELECT timestamp, trace_id, span_id, name, duration_nano, parent_span_id, resources_string['service.name'] AS `service.name` FROM A_DIR_DESC_B ORDER BY timestamp DESC LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000", + Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND true), A_DIR_DESC_B AS (SELECT p.* FROM A AS p INNER JOIN B AS c ON p.trace_id = c.trace_id AND p.span_id = c.parent_span_id) SELECT timestamp, trace_id, span_id, name, duration_nano, parent_span_id, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) AS `service.name` FROM A_DIR_DESC_B ORDER BY timestamp DESC LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000", Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "backend", "%service.name%", "%service.name\":\"backend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10}, }, expectedErr: nil, @@ -263,8 +263,8 @@ func TestTraceOperatorStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), A_DIR_DESC_B AS (SELECT p.* FROM A AS p INNER JOIN B AS c ON p.trace_id = c.trace_id AND p.span_id = c.parent_span_id) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM A_DIR_DESC_B GROUP BY ts, `service.name` ORDER BY ts desc SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000", - Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "backend", "%service.name%", "%service.name\":\"backend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), true}, + Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? 
AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), A_DIR_DESC_B AS (SELECT p.* FROM A AS p INNER JOIN B AS c ON p.trace_id = c.trace_id AND p.span_id = c.parent_span_id) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, count() AS __result_0 FROM A_DIR_DESC_B GROUP BY ts, `service.name` ORDER BY ts desc SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000", + Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "backend", "%service.name%", "%service.name\":\"backend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", 
uint64(1747945619), uint64(1747983448), "NULL"}, }, expectedErr: nil, }, @@ -322,8 +322,8 @@ func TestTraceOperatorStatementBuilder(t *testing.T) { }, }, expected: qbtypes.Statement{ - Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
AND toFloat64(response_status_code) < ?), A_AND_B AS (SELECT l.* FROM A AS l INNER JOIN B AS r ON l.trace_id = r.trace_id) SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, avg(multiIf(duration_nano <> ?, duration_nano, NULL)) AS __result_0 FROM A_AND_B GROUP BY `service.name` ORDER BY __result_0 desc SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000", - Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), float64(400), true, 0}, + Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE true AND seen_at_ts_bucket_start >= ? 
AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND toFloat64(response_status_code) < ?), A_AND_B AS (SELECT l.* FROM A AS l INNER JOIN B AS r ON l.trace_id = r.trace_id) SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) <> ?, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, avg(multiIf(duration_nano <> ?, duration_nano, NULL)) AS __result_0 FROM A_AND_B GROUP BY `service.name` ORDER BY __result_0 desc SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000", + Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), float64(400), "NULL", 0}, }, expectedErr: nil, }, From a6869418804202e375b4f73960a79ed857d883c7 Mon Sep 17 00:00:00 2001 From: Nityananda Gohain Date: Sun, 14 Sep 2025 18:30:16 +0530 Subject: [PATCH 20/51] fix: exception on resource filters with numeric values (#9028) --- .../resourcefilter/condition_builder.go | 59 ++++++++----------- .../resourcefilter/condition_builder_test.go | 55 +++++++++++++++++ 2 files changed, 81 insertions(+), 33 deletions(-) diff --git a/pkg/querybuilder/resourcefilter/condition_builder.go b/pkg/querybuilder/resourcefilter/condition_builder.go index 
66fdefc90ee4..0539b3bfa008 100644 --- a/pkg/querybuilder/resourcefilter/condition_builder.go +++ b/pkg/querybuilder/resourcefilter/condition_builder.go @@ -22,21 +22,20 @@ func NewConditionBuilder(fm qbtypes.FieldMapper) *defaultConditionBuilder { func valueForIndexFilter(op qbtypes.FilterOperator, key *telemetrytypes.TelemetryFieldKey, value any) any { switch v := value.(type) { - case string: - if op == qbtypes.FilterOperatorEqual || op == qbtypes.FilterOperatorNotEqual { - return fmt.Sprintf(`%%%s":"%s%%`, key.Name, v) - } - return fmt.Sprintf(`%%%s%%%s%%`, key.Name, v) case []any: // assuming array will always be for in and not in values := make([]string, 0, len(v)) for _, v := range v { - values = append(values, fmt.Sprintf(`%%%s":"%s%%`, key.Name, v)) + values = append(values, fmt.Sprintf(`%%%s":"%s%%`, key.Name, querybuilder.FormatValueForContains(v))) } return values + default: + // format to string for anything else as we store resource values as string + if op == qbtypes.FilterOperatorEqual || op == qbtypes.FilterOperatorNotEqual { + return fmt.Sprintf(`%%%s":"%s%%`, key.Name, querybuilder.FormatValueForContains(v)) + } + return fmt.Sprintf(`%%%s%%%s%%`, key.Name, querybuilder.FormatValueForContains(v)) } - // resource table expects string value - return fmt.Sprintf(`%%%v%%`, value) } func keyIndexFilter(key *telemetrytypes.TelemetryFieldKey) any { @@ -55,15 +54,9 @@ func (b *defaultConditionBuilder) ConditionFor( return "true", nil } - switch op { - case qbtypes.FilterOperatorContains, - qbtypes.FilterOperatorNotContains, - qbtypes.FilterOperatorILike, - qbtypes.FilterOperatorNotILike, - qbtypes.FilterOperatorLike, - qbtypes.FilterOperatorNotLike: - value = querybuilder.FormatValueForContains(value) - } + // except for in, not in, between, not between all other operators should have formatted value + // as we store resource values as string + formattedValue := querybuilder.FormatValueForContains(value) column, err := b.fm.ColumnFor(ctx, key) if err != 
nil { @@ -81,34 +74,34 @@ func (b *defaultConditionBuilder) ConditionFor( switch op { case qbtypes.FilterOperatorEqual: return sb.And( - sb.E(fieldName, value), + sb.E(fieldName, formattedValue), keyIdxFilter, sb.Like(column.Name, valueForIndexFilter), ), nil case qbtypes.FilterOperatorNotEqual: return sb.And( - sb.NE(fieldName, value), + sb.NE(fieldName, formattedValue), sb.NotLike(column.Name, valueForIndexFilter), ), nil case qbtypes.FilterOperatorGreaterThan: - return sb.And(sb.GT(fieldName, value), keyIdxFilter), nil + return sb.And(sb.GT(fieldName, formattedValue), keyIdxFilter), nil case qbtypes.FilterOperatorGreaterThanOrEq: - return sb.And(sb.GE(fieldName, value), keyIdxFilter), nil + return sb.And(sb.GE(fieldName, formattedValue), keyIdxFilter), nil case qbtypes.FilterOperatorLessThan: - return sb.And(sb.LT(fieldName, value), keyIdxFilter), nil + return sb.And(sb.LT(fieldName, formattedValue), keyIdxFilter), nil case qbtypes.FilterOperatorLessThanOrEq: - return sb.And(sb.LE(fieldName, value), keyIdxFilter), nil + return sb.And(sb.LE(fieldName, formattedValue), keyIdxFilter), nil case qbtypes.FilterOperatorLike, qbtypes.FilterOperatorILike: return sb.And( - sb.ILike(fieldName, value), + sb.ILike(fieldName, formattedValue), keyIdxFilter, sb.ILike(column.Name, valueForIndexFilter), ), nil case qbtypes.FilterOperatorNotLike, qbtypes.FilterOperatorNotILike: // no index filter: as cannot apply `not contains x%y` as y can be somewhere else return sb.And( - sb.NotILike(fieldName, value), + sb.NotILike(fieldName, formattedValue), ), nil case qbtypes.FilterOperatorBetween: @@ -119,7 +112,7 @@ func (b *defaultConditionBuilder) ConditionFor( if len(values) != 2 { return "", qbtypes.ErrBetweenValues } - return sb.And(keyIdxFilter, sb.Between(fieldName, values[0], values[1])), nil + return sb.And(keyIdxFilter, sb.Between(fieldName, querybuilder.FormatValueForContains(values[0]), querybuilder.FormatValueForContains(values[1]))), nil case 
qbtypes.FilterOperatorNotBetween: values, ok := value.([]any) if !ok { @@ -128,7 +121,7 @@ func (b *defaultConditionBuilder) ConditionFor( if len(values) != 2 { return "", qbtypes.ErrBetweenValues } - return sb.And(sb.NotBetween(fieldName, values[0], values[1])), nil + return sb.And(sb.NotBetween(fieldName, querybuilder.FormatValueForContains(values[0]), querybuilder.FormatValueForContains(values[1]))), nil case qbtypes.FilterOperatorIn: values, ok := value.([]any) @@ -137,7 +130,7 @@ func (b *defaultConditionBuilder) ConditionFor( } inConditions := make([]string, 0, len(values)) for _, v := range values { - inConditions = append(inConditions, sb.E(fieldName, v)) + inConditions = append(inConditions, sb.E(fieldName, querybuilder.FormatValueForContains(v))) } mainCondition := sb.Or(inConditions...) valConditions := make([]string, 0, len(values)) @@ -156,7 +149,7 @@ func (b *defaultConditionBuilder) ConditionFor( } notInConditions := make([]string, 0, len(values)) for _, v := range values { - notInConditions = append(notInConditions, sb.NE(fieldName, v)) + notInConditions = append(notInConditions, sb.NE(fieldName, querybuilder.FormatValueForContains(v))) } mainCondition := sb.And(notInConditions...) 
valConditions := make([]string, 0, len(values)) @@ -180,24 +173,24 @@ func (b *defaultConditionBuilder) ConditionFor( case qbtypes.FilterOperatorRegexp: return sb.And( - fmt.Sprintf("match(%s, %s)", fieldName, sb.Var(value)), + fmt.Sprintf("match(%s, %s)", fieldName, sb.Var(formattedValue)), keyIdxFilter, ), nil case qbtypes.FilterOperatorNotRegexp: return sb.And( - fmt.Sprintf("NOT match(%s, %s)", fieldName, sb.Var(value)), + fmt.Sprintf("NOT match(%s, %s)", fieldName, sb.Var(formattedValue)), ), nil case qbtypes.FilterOperatorContains: return sb.And( - sb.ILike(fieldName, fmt.Sprintf(`%%%s%%`, value)), + sb.ILike(fieldName, fmt.Sprintf(`%%%s%%`, formattedValue)), keyIdxFilter, sb.ILike(column.Name, valueForIndexFilter), ), nil case qbtypes.FilterOperatorNotContains: // no index filter: as cannot apply `not contains x%y` as y can be somewhere else return sb.And( - sb.NotILike(fieldName, fmt.Sprintf(`%%%s%%`, value)), + sb.NotILike(fieldName, fmt.Sprintf(`%%%s%%`, formattedValue)), ), nil } return "", qbtypes.ErrUnsupportedOperator diff --git a/pkg/querybuilder/resourcefilter/condition_builder_test.go b/pkg/querybuilder/resourcefilter/condition_builder_test.go index ac4cbab7bf66..6a9363f595dd 100644 --- a/pkg/querybuilder/resourcefilter/condition_builder_test.go +++ b/pkg/querybuilder/resourcefilter/condition_builder_test.go @@ -143,6 +143,61 @@ func TestConditionBuilder(t *testing.T) { expected: "simpleJSONHas(labels, 'k8s.namespace.name') <> ?", expectedArgs: []any{true}, }, + { + name: "number_equals", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "test_num", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorEqual, + value: 1, + expected: "simpleJSONExtractString(labels, 'test_num') = ? AND labels LIKE ? 
AND labels LIKE ?", + expectedArgs: []any{"1", "%test_num%", "%test_num\":\"1%"}, + }, + { + name: "number_gt", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "test_num", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorGreaterThan, + value: 1, + expected: "simpleJSONExtractString(labels, 'test_num') > ? AND labels LIKE ?", + expectedArgs: []any{"1", "%test_num%"}, + }, + { + name: "number_in", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "test_num", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorIn, + value: []any{1, 2}, + expected: "(simpleJSONExtractString(labels, 'test_num') = ? OR simpleJSONExtractString(labels, 'test_num') = ?) AND labels LIKE ? AND (labels LIKE ? OR labels LIKE ?)", + expectedArgs: []any{"1", "2", "%test_num%", "%test_num\":\"1%", "%test_num\":\"2%"}, + }, + { + name: "number_between", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "test_num", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorBetween, + value: []any{1, 2}, + expected: "labels LIKE ? AND simpleJSONExtractString(labels, 'test_num') BETWEEN ? AND ?", + expectedArgs: []any{"%test_num%", "1", "2"}, + }, + { + name: "string_regexp", + key: &telemetrytypes.TelemetryFieldKey{ + Name: "k8s.namespace.name", + FieldContext: telemetrytypes.FieldContextResource, + }, + op: querybuildertypesv5.FilterOperatorRegexp, + value: "ban.*", + expected: "match(simpleJSONExtractString(labels, 'k8s.namespace.name'), ?) 
AND labels LIKE ?", + expectedArgs: []any{"ban.*", "%k8s.namespace.name%"}, + }, } fm := NewFieldMapper() From 38ca467d13ec645ba1ef6201369270b00b9f9cec Mon Sep 17 00:00:00 2001 From: Nityananda Gohain Date: Sun, 14 Sep 2025 18:42:48 +0530 Subject: [PATCH 21/51] fix: trace perf - scan only required traces (#9072) --- pkg/telemetrytraces/statement_builder.go | 1 + pkg/telemetrytraces/stmt_builder_test.go | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/telemetrytraces/statement_builder.go b/pkg/telemetrytraces/statement_builder.go index b5c1d3d1297f..beb243cdba59 100644 --- a/pkg/telemetrytraces/statement_builder.go +++ b/pkg/telemetrytraces/statement_builder.go @@ -390,6 +390,7 @@ func (b *traceQueryStatementBuilder) buildTraceQuery( innerSB.Select("trace_id", "duration_nano", sqlbuilder.Escape("resource_string_service$$name as `service.name`"), "name") innerSB.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName)) innerSB.Where("parent_span_id = ''") + innerSB.Where("trace_id GLOBAL IN __toe") // Add time filter to inner query innerSB.Where( diff --git a/pkg/telemetrytraces/stmt_builder_test.go b/pkg/telemetrytraces/stmt_builder_test.go index ba089266f34b..96dc2ba7a335 100644 --- a/pkg/telemetrytraces/stmt_builder_test.go +++ b/pkg/telemetrytraces/stmt_builder_test.go @@ -547,7 +547,7 @@ func TestStatementBuilderTraceQuery(t *testing.T) { Limit: 10, }, expected: qbtypes.Statement{ - Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __toe AS (SELECT trace_id FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? 
AND ts_bucket_start <= ?), __toe_duration_sorted AS (SELECT trace_id, duration_nano, resource_string_service$$name as `service.name`, name FROM signoz_traces.distributed_signoz_index_v3 WHERE parent_span_id = '' AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? ORDER BY duration_nano DESC LIMIT 1 BY trace_id) SELECT __toe_duration_sorted.`service.name` AS `service.name`, __toe_duration_sorted.name AS `name`, count() AS span_count, __toe_duration_sorted.duration_nano AS `duration_nano`, __toe_duration_sorted.trace_id AS `trace_id` FROM __toe INNER JOIN __toe_duration_sorted ON __toe.trace_id = __toe_duration_sorted.trace_id GROUP BY trace_id, duration_nano, name, `service.name` ORDER BY duration_nano DESC LIMIT 1 BY trace_id LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000", + Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __toe AS (SELECT trace_id FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __toe_duration_sorted AS (SELECT trace_id, duration_nano, resource_string_service$$name as `service.name`, name FROM signoz_traces.distributed_signoz_index_v3 WHERE parent_span_id = '' AND trace_id GLOBAL IN __toe AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? 
ORDER BY duration_nano DESC LIMIT 1 BY trace_id) SELECT __toe_duration_sorted.`service.name` AS `service.name`, __toe_duration_sorted.name AS `name`, count() AS span_count, __toe_duration_sorted.duration_nano AS `duration_nano`, __toe_duration_sorted.trace_id AS `trace_id` FROM __toe INNER JOIN __toe_duration_sorted ON __toe.trace_id = __toe_duration_sorted.trace_id GROUP BY trace_id, duration_nano, name, `service.name` ORDER BY duration_nano DESC LIMIT 1 BY trace_id LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000", Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10}, }, expectedErr: nil, From 252786deb6c7eeaa89a184d3eaddedb664930a1b Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Sun, 14 Sep 2025 20:41:13 +0530 Subject: [PATCH 22/51] chore: make qb v5 default (#9085) --- pkg/modules/dashboard/impldashboard/handler.go | 9 +++------ pkg/querybuilder/init.go | 14 -------------- 2 files changed, 3 insertions(+), 20 deletions(-) diff --git a/pkg/modules/dashboard/impldashboard/handler.go b/pkg/modules/dashboard/impldashboard/handler.go index 39a5987a40c5..139f4c3a6a82 100644 --- a/pkg/modules/dashboard/impldashboard/handler.go +++ b/pkg/modules/dashboard/impldashboard/handler.go @@ -10,7 +10,6 @@ import ( "github.com/SigNoz/signoz/pkg/factory" "github.com/SigNoz/signoz/pkg/http/render" "github.com/SigNoz/signoz/pkg/modules/dashboard" - "github.com/SigNoz/signoz/pkg/querybuilder" "github.com/SigNoz/signoz/pkg/transition" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/types/ctxtypes" @@ -51,11 +50,9 @@ func (handler *handler) Create(rw http.ResponseWriter, r *http.Request) { return } - if querybuilder.QBV5Enabled { - dashboardMigrator := 
transition.NewDashboardMigrateV5(handler.providerSettings.Logger, nil, nil) - if req["version"] != "v5" { - dashboardMigrator.Migrate(ctx, req) - } + dashboardMigrator := transition.NewDashboardMigrateV5(handler.providerSettings.Logger, nil, nil) + if req["version"] != "v5" { + dashboardMigrator.Migrate(ctx, req) } dashboard, err := handler.module.Create(ctx, orgID, claims.Email, valuer.MustNewUUID(claims.UserID), req) diff --git a/pkg/querybuilder/init.go b/pkg/querybuilder/init.go index dc481163e226..4fef9bfb026f 100644 --- a/pkg/querybuilder/init.go +++ b/pkg/querybuilder/init.go @@ -1,15 +1 @@ package querybuilder - -import ( - "os" - "strings" -) - -var QBV5Enabled = false - -func init() { - v := os.Getenv("ENABLE_QB_V5") - if strings.ToLower(v) == "true" || strings.ToLower(v) == "1" { - QBV5Enabled = true - } -} From c982b1e76d3f165b4b4081143bd86387ce6a2dfb Mon Sep 17 00:00:00 2001 From: Srikanth Chekuri Date: Sun, 14 Sep 2025 22:14:42 +0530 Subject: [PATCH 23/51] chore: allow number segment, #, @, {} in key (#9082) --- frontend/src/parser/FilterQueryLexer.interp | 2 +- frontend/src/parser/FilterQueryLexer.ts | 183 ++++++------- frontend/src/parser/FilterQueryListener.ts | 2 +- frontend/src/parser/FilterQueryParser.ts | 2 +- frontend/src/parser/FilterQueryVisitor.ts | 2 +- grammar/FilterQuery.g4 | 4 +- pkg/parser/grammar/FilterQueryLexer.interp | 2 +- pkg/parser/grammar/filterquery_lexer.go | 247 +++++++++--------- pkg/telemetrylogs/filter_expr_logs_test.go | 40 +++ pkg/telemetrylogs/test_data.go | 28 ++ .../querybuildertypesv5/builder_elements.go | 1 + scripts/grammar/generate-frontend-parser.sh | 11 + 12 files changed, 305 insertions(+), 219 deletions(-) create mode 100755 scripts/grammar/generate-frontend-parser.sh diff --git a/frontend/src/parser/FilterQueryLexer.interp b/frontend/src/parser/FilterQueryLexer.interp index 6f3d2d7e1697..bb681c909134 100644 --- a/frontend/src/parser/FilterQueryLexer.interp +++ b/frontend/src/parser/FilterQueryLexer.interp @@ 
-115,4 +115,4 @@ mode names: DEFAULT_MODE atn: -[4, 0, 32, 314, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 89, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 132, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 149, 8, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 201, 8, 26, 1, 27, 1, 27, 1, 28, 3, 28, 206, 8, 28, 1, 28, 4, 28, 209, 8, 28, 11, 28, 12, 28, 210, 1, 28, 1, 28, 5, 28, 215, 8, 28, 10, 28, 12, 28, 218, 9, 28, 3, 28, 220, 8, 28, 1, 28, 1, 28, 3, 28, 224, 8, 28, 1, 28, 4, 28, 227, 8, 28, 11, 28, 12, 28, 228, 3, 28, 231, 8, 28, 1, 28, 3, 28, 234, 8, 28, 1, 28, 1, 28, 4, 28, 238, 8, 28, 11, 28, 12, 28, 239, 1, 28, 1, 28, 3, 28, 244, 8, 28, 1, 28, 4, 28, 247, 8, 28, 11, 28, 12, 28, 248, 3, 28, 251, 8, 28, 3, 28, 253, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 259, 8, 29, 10, 29, 12, 29, 262, 9, 29, 1, 29, 1, 
29, 1, 29, 1, 29, 1, 29, 5, 29, 269, 8, 29, 10, 29, 12, 29, 272, 9, 29, 1, 29, 3, 29, 275, 8, 29, 1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 296, 8, 33, 10, 33, 12, 33, 299, 9, 33, 1, 34, 4, 34, 302, 8, 34, 11, 34, 12, 34, 303, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 4, 36, 311, 8, 36, 11, 36, 12, 36, 312, 0, 0, 37, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 0, 57, 28, 59, 29, 61, 0, 63, 0, 65, 0, 67, 30, 69, 31, 71, 0, 73, 32, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116, 2, 0, 87, 87, 119, 119, 2, 0, 78, 78, 110, 110, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 79, 79, 111, 111, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 4, 0, 36, 36, 65, 90, 95, 95, 97, 122, 6, 0, 36, 36, 45, 45, 47, 58, 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 336, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 57, 
1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 1, 75, 1, 0, 0, 0, 3, 77, 1, 0, 0, 0, 5, 79, 1, 0, 0, 0, 7, 81, 1, 0, 0, 0, 9, 83, 1, 0, 0, 0, 11, 88, 1, 0, 0, 0, 13, 90, 1, 0, 0, 0, 15, 93, 1, 0, 0, 0, 17, 96, 1, 0, 0, 0, 19, 98, 1, 0, 0, 0, 21, 101, 1, 0, 0, 0, 23, 103, 1, 0, 0, 0, 25, 106, 1, 0, 0, 0, 27, 111, 1, 0, 0, 0, 29, 117, 1, 0, 0, 0, 31, 125, 1, 0, 0, 0, 33, 133, 1, 0, 0, 0, 35, 140, 1, 0, 0, 0, 37, 150, 1, 0, 0, 0, 39, 153, 1, 0, 0, 0, 41, 157, 1, 0, 0, 0, 43, 161, 1, 0, 0, 0, 45, 164, 1, 0, 0, 0, 47, 173, 1, 0, 0, 0, 49, 177, 1, 0, 0, 0, 51, 184, 1, 0, 0, 0, 53, 200, 1, 0, 0, 0, 55, 202, 1, 0, 0, 0, 57, 252, 1, 0, 0, 0, 59, 274, 1, 0, 0, 0, 61, 276, 1, 0, 0, 0, 63, 283, 1, 0, 0, 0, 65, 286, 1, 0, 0, 0, 67, 290, 1, 0, 0, 0, 69, 301, 1, 0, 0, 0, 71, 307, 1, 0, 0, 0, 73, 310, 1, 0, 0, 0, 75, 76, 5, 40, 0, 0, 76, 2, 1, 0, 0, 0, 77, 78, 5, 41, 0, 0, 78, 4, 1, 0, 0, 0, 79, 80, 5, 91, 0, 0, 80, 6, 1, 0, 0, 0, 81, 82, 5, 93, 0, 0, 82, 8, 1, 0, 0, 0, 83, 84, 5, 44, 0, 0, 84, 10, 1, 0, 0, 0, 85, 89, 5, 61, 0, 0, 86, 87, 5, 61, 0, 0, 87, 89, 5, 61, 0, 0, 88, 85, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, 89, 12, 1, 0, 0, 0, 90, 91, 5, 33, 0, 0, 91, 92, 5, 61, 0, 0, 92, 14, 1, 0, 0, 0, 93, 94, 5, 60, 0, 0, 94, 95, 5, 62, 0, 0, 95, 16, 1, 0, 0, 0, 96, 97, 5, 60, 0, 0, 97, 18, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 100, 5, 61, 0, 0, 100, 20, 1, 0, 0, 0, 101, 102, 5, 62, 0, 0, 102, 22, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104, 105, 5, 61, 0, 0, 105, 24, 1, 0, 0, 0, 106, 107, 7, 0, 0, 0, 107, 108, 7, 1, 0, 0, 108, 109, 7, 2, 0, 0, 109, 110, 7, 3, 0, 0, 110, 26, 1, 0, 0, 0, 111, 112, 7, 1, 0, 0, 112, 113, 7, 0, 0, 0, 113, 114, 7, 1, 0, 0, 114, 115, 7, 2, 0, 0, 115, 116, 7, 3, 0, 0, 116, 28, 1, 0, 0, 0, 117, 118, 7, 4, 0, 0, 118, 119, 7, 3, 0, 0, 119, 120, 7, 5, 0, 0, 120, 121, 7, 6, 0, 0, 121, 122, 7, 3, 0, 0, 122, 123, 7, 3, 0, 0, 123, 124, 7, 7, 0, 0, 124, 30, 1, 0, 0, 0, 125, 126, 7, 3, 0, 0, 126, 127, 7, 8, 0, 0, 127, 128, 7, 1, 0, 0, 128, 
129, 7, 9, 0, 0, 129, 131, 7, 5, 0, 0, 130, 132, 7, 9, 0, 0, 131, 130, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 32, 1, 0, 0, 0, 133, 134, 7, 10, 0, 0, 134, 135, 7, 3, 0, 0, 135, 136, 7, 11, 0, 0, 136, 137, 7, 3, 0, 0, 137, 138, 7, 8, 0, 0, 138, 139, 7, 12, 0, 0, 139, 34, 1, 0, 0, 0, 140, 141, 7, 13, 0, 0, 141, 142, 7, 14, 0, 0, 142, 143, 7, 7, 0, 0, 143, 144, 7, 5, 0, 0, 144, 145, 7, 15, 0, 0, 145, 146, 7, 1, 0, 0, 146, 148, 7, 7, 0, 0, 147, 149, 7, 9, 0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 36, 1, 0, 0, 0, 150, 151, 7, 1, 0, 0, 151, 152, 7, 7, 0, 0, 152, 38, 1, 0, 0, 0, 153, 154, 7, 7, 0, 0, 154, 155, 7, 14, 0, 0, 155, 156, 7, 5, 0, 0, 156, 40, 1, 0, 0, 0, 157, 158, 7, 15, 0, 0, 158, 159, 7, 7, 0, 0, 159, 160, 7, 16, 0, 0, 160, 42, 1, 0, 0, 0, 161, 162, 7, 14, 0, 0, 162, 163, 7, 10, 0, 0, 163, 44, 1, 0, 0, 0, 164, 165, 7, 17, 0, 0, 165, 166, 7, 15, 0, 0, 166, 167, 7, 9, 0, 0, 167, 168, 7, 5, 0, 0, 168, 169, 7, 14, 0, 0, 169, 170, 7, 2, 0, 0, 170, 171, 7, 3, 0, 0, 171, 172, 7, 7, 0, 0, 172, 46, 1, 0, 0, 0, 173, 174, 7, 17, 0, 0, 174, 175, 7, 15, 0, 0, 175, 176, 7, 9, 0, 0, 176, 48, 1, 0, 0, 0, 177, 178, 7, 17, 0, 0, 178, 179, 7, 15, 0, 0, 179, 180, 7, 9, 0, 0, 180, 181, 7, 15, 0, 0, 181, 182, 7, 7, 0, 0, 182, 183, 7, 18, 0, 0, 183, 50, 1, 0, 0, 0, 184, 185, 7, 17, 0, 0, 185, 186, 7, 15, 0, 0, 186, 187, 7, 9, 0, 0, 187, 188, 7, 15, 0, 0, 188, 189, 7, 0, 0, 0, 189, 190, 7, 0, 0, 0, 190, 52, 1, 0, 0, 0, 191, 192, 7, 5, 0, 0, 192, 193, 7, 10, 0, 0, 193, 194, 7, 19, 0, 0, 194, 201, 7, 3, 0, 0, 195, 196, 7, 20, 0, 0, 196, 197, 7, 15, 0, 0, 197, 198, 7, 0, 0, 0, 198, 199, 7, 9, 0, 0, 199, 201, 7, 3, 0, 0, 200, 191, 1, 0, 0, 0, 200, 195, 1, 0, 0, 0, 201, 54, 1, 0, 0, 0, 202, 203, 7, 21, 0, 0, 203, 56, 1, 0, 0, 0, 204, 206, 3, 55, 27, 0, 205, 204, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 208, 1, 0, 0, 0, 207, 209, 3, 71, 35, 0, 208, 207, 1, 0, 0, 0, 209, 210, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 210, 211, 1, 0, 0, 0, 211, 219, 1, 0, 0, 0, 212, 216, 5, 46, 0, 0, 213, 
215, 3, 71, 35, 0, 214, 213, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 220, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 212, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 230, 1, 0, 0, 0, 221, 223, 7, 3, 0, 0, 222, 224, 3, 55, 27, 0, 223, 222, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 1, 0, 0, 0, 225, 227, 3, 71, 35, 0, 226, 225, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 231, 1, 0, 0, 0, 230, 221, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 253, 1, 0, 0, 0, 232, 234, 3, 55, 27, 0, 233, 232, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 237, 5, 46, 0, 0, 236, 238, 3, 71, 35, 0, 237, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 55, 27, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 71, 35, 0, 246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 253, 1, 0, 0, 0, 252, 205, 1, 0, 0, 0, 252, 233, 1, 0, 0, 0, 253, 58, 1, 0, 0, 0, 254, 260, 5, 34, 0, 0, 255, 259, 8, 22, 0, 0, 256, 257, 5, 92, 0, 0, 257, 259, 9, 0, 0, 0, 258, 255, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 263, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 275, 5, 34, 0, 0, 264, 270, 5, 39, 0, 0, 265, 269, 8, 23, 0, 0, 266, 267, 5, 92, 0, 0, 267, 269, 9, 0, 0, 0, 268, 265, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 39, 0, 0, 274, 254, 1, 0, 0, 0, 274, 264, 1, 0, 0, 0, 275, 60, 1, 0, 0, 0, 276, 280, 7, 24, 0, 0, 277, 279, 7, 25, 0, 0, 278, 277, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 62, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 284, 5, 91, 0, 0, 284, 285, 5, 93, 0, 0, 285, 64, 1, 0, 0, 0, 
286, 287, 5, 91, 0, 0, 287, 288, 5, 42, 0, 0, 288, 289, 5, 93, 0, 0, 289, 66, 1, 0, 0, 0, 290, 297, 3, 61, 30, 0, 291, 292, 5, 46, 0, 0, 292, 296, 3, 61, 30, 0, 293, 296, 3, 63, 31, 0, 294, 296, 3, 65, 32, 0, 295, 291, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 295, 294, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 68, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 300, 302, 7, 26, 0, 0, 301, 300, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306, 6, 34, 0, 0, 306, 70, 1, 0, 0, 0, 307, 308, 7, 27, 0, 0, 308, 72, 1, 0, 0, 0, 309, 311, 8, 28, 0, 0, 310, 309, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 74, 1, 0, 0, 0, 28, 0, 88, 131, 148, 200, 205, 210, 216, 219, 223, 228, 230, 233, 239, 243, 248, 250, 252, 258, 260, 268, 270, 274, 280, 295, 297, 303, 312, 1, 6, 0, 0] \ No newline at end of file +[4, 0, 32, 320, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 89, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 132, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 149, 8, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 
19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 201, 8, 26, 1, 27, 1, 27, 1, 28, 3, 28, 206, 8, 28, 1, 28, 4, 28, 209, 8, 28, 11, 28, 12, 28, 210, 1, 28, 1, 28, 5, 28, 215, 8, 28, 10, 28, 12, 28, 218, 9, 28, 3, 28, 220, 8, 28, 1, 28, 1, 28, 3, 28, 224, 8, 28, 1, 28, 4, 28, 227, 8, 28, 11, 28, 12, 28, 228, 3, 28, 231, 8, 28, 1, 28, 3, 28, 234, 8, 28, 1, 28, 1, 28, 4, 28, 238, 8, 28, 11, 28, 12, 28, 239, 1, 28, 1, 28, 3, 28, 244, 8, 28, 1, 28, 4, 28, 247, 8, 28, 11, 28, 12, 28, 248, 3, 28, 251, 8, 28, 3, 28, 253, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 259, 8, 29, 10, 29, 12, 29, 262, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 269, 8, 29, 10, 29, 12, 29, 272, 9, 29, 1, 29, 3, 29, 275, 8, 29, 1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 4, 33, 298, 8, 33, 11, 33, 12, 33, 299, 5, 33, 302, 8, 33, 10, 33, 12, 33, 305, 9, 33, 1, 34, 4, 34, 308, 8, 34, 11, 34, 12, 34, 309, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 4, 36, 317, 8, 36, 11, 36, 12, 36, 318, 0, 0, 37, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 0, 57, 28, 59, 29, 61, 0, 63, 0, 65, 0, 67, 30, 69, 31, 71, 0, 73, 32, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116, 2, 0, 87, 87, 119, 119, 2, 0, 78, 78, 110, 110, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 79, 79, 111, 111, 2, 0, 
65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 4, 0, 35, 36, 64, 90, 95, 95, 97, 123, 7, 0, 35, 36, 45, 45, 47, 58, 64, 90, 95, 95, 97, 123, 125, 125, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 344, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 1, 75, 1, 0, 0, 0, 3, 77, 1, 0, 0, 0, 5, 79, 1, 0, 0, 0, 7, 81, 1, 0, 0, 0, 9, 83, 1, 0, 0, 0, 11, 88, 1, 0, 0, 0, 13, 90, 1, 0, 0, 0, 15, 93, 1, 0, 0, 0, 17, 96, 1, 0, 0, 0, 19, 98, 1, 0, 0, 0, 21, 101, 1, 0, 0, 0, 23, 103, 1, 0, 0, 0, 25, 106, 1, 0, 0, 0, 27, 111, 1, 0, 0, 0, 29, 117, 1, 0, 0, 0, 31, 125, 1, 0, 0, 0, 33, 133, 1, 0, 0, 0, 35, 140, 1, 0, 0, 0, 37, 150, 1, 0, 0, 0, 39, 153, 1, 0, 0, 0, 41, 157, 1, 0, 0, 0, 43, 161, 1, 0, 0, 0, 45, 164, 1, 0, 0, 0, 47, 173, 1, 0, 0, 0, 49, 177, 1, 0, 0, 0, 51, 184, 1, 0, 0, 0, 53, 200, 1, 0, 0, 0, 55, 202, 1, 0, 0, 0, 57, 252, 1, 0, 0, 0, 59, 274, 1, 0, 0, 0, 61, 276, 1, 0, 0, 0, 63, 283, 1, 0, 0, 0, 65, 286, 1, 0, 0, 0, 67, 290, 1, 0, 0, 0, 69, 307, 1, 0, 0, 0, 71, 313, 1, 0, 0, 0, 73, 316, 1, 0, 0, 0, 75, 76, 5, 40, 0, 0, 76, 2, 1, 0, 0, 0, 77, 78, 5, 41, 0, 0, 78, 4, 1, 0, 0, 0, 79, 80, 5, 91, 0, 0, 80, 6, 1, 0, 0, 0, 81, 82, 5, 93, 0, 0, 82, 8, 1, 0, 0, 0, 83, 84, 5, 44, 0, 0, 84, 10, 1, 0, 0, 0, 85, 89, 5, 
61, 0, 0, 86, 87, 5, 61, 0, 0, 87, 89, 5, 61, 0, 0, 88, 85, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, 89, 12, 1, 0, 0, 0, 90, 91, 5, 33, 0, 0, 91, 92, 5, 61, 0, 0, 92, 14, 1, 0, 0, 0, 93, 94, 5, 60, 0, 0, 94, 95, 5, 62, 0, 0, 95, 16, 1, 0, 0, 0, 96, 97, 5, 60, 0, 0, 97, 18, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 100, 5, 61, 0, 0, 100, 20, 1, 0, 0, 0, 101, 102, 5, 62, 0, 0, 102, 22, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104, 105, 5, 61, 0, 0, 105, 24, 1, 0, 0, 0, 106, 107, 7, 0, 0, 0, 107, 108, 7, 1, 0, 0, 108, 109, 7, 2, 0, 0, 109, 110, 7, 3, 0, 0, 110, 26, 1, 0, 0, 0, 111, 112, 7, 1, 0, 0, 112, 113, 7, 0, 0, 0, 113, 114, 7, 1, 0, 0, 114, 115, 7, 2, 0, 0, 115, 116, 7, 3, 0, 0, 116, 28, 1, 0, 0, 0, 117, 118, 7, 4, 0, 0, 118, 119, 7, 3, 0, 0, 119, 120, 7, 5, 0, 0, 120, 121, 7, 6, 0, 0, 121, 122, 7, 3, 0, 0, 122, 123, 7, 3, 0, 0, 123, 124, 7, 7, 0, 0, 124, 30, 1, 0, 0, 0, 125, 126, 7, 3, 0, 0, 126, 127, 7, 8, 0, 0, 127, 128, 7, 1, 0, 0, 128, 129, 7, 9, 0, 0, 129, 131, 7, 5, 0, 0, 130, 132, 7, 9, 0, 0, 131, 130, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 32, 1, 0, 0, 0, 133, 134, 7, 10, 0, 0, 134, 135, 7, 3, 0, 0, 135, 136, 7, 11, 0, 0, 136, 137, 7, 3, 0, 0, 137, 138, 7, 8, 0, 0, 138, 139, 7, 12, 0, 0, 139, 34, 1, 0, 0, 0, 140, 141, 7, 13, 0, 0, 141, 142, 7, 14, 0, 0, 142, 143, 7, 7, 0, 0, 143, 144, 7, 5, 0, 0, 144, 145, 7, 15, 0, 0, 145, 146, 7, 1, 0, 0, 146, 148, 7, 7, 0, 0, 147, 149, 7, 9, 0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 36, 1, 0, 0, 0, 150, 151, 7, 1, 0, 0, 151, 152, 7, 7, 0, 0, 152, 38, 1, 0, 0, 0, 153, 154, 7, 7, 0, 0, 154, 155, 7, 14, 0, 0, 155, 156, 7, 5, 0, 0, 156, 40, 1, 0, 0, 0, 157, 158, 7, 15, 0, 0, 158, 159, 7, 7, 0, 0, 159, 160, 7, 16, 0, 0, 160, 42, 1, 0, 0, 0, 161, 162, 7, 14, 0, 0, 162, 163, 7, 10, 0, 0, 163, 44, 1, 0, 0, 0, 164, 165, 7, 17, 0, 0, 165, 166, 7, 15, 0, 0, 166, 167, 7, 9, 0, 0, 167, 168, 7, 5, 0, 0, 168, 169, 7, 14, 0, 0, 169, 170, 7, 2, 0, 0, 170, 171, 7, 3, 0, 0, 171, 172, 7, 7, 0, 0, 172, 46, 1, 0, 0, 0, 173, 174, 7, 17, 0, 0, 
174, 175, 7, 15, 0, 0, 175, 176, 7, 9, 0, 0, 176, 48, 1, 0, 0, 0, 177, 178, 7, 17, 0, 0, 178, 179, 7, 15, 0, 0, 179, 180, 7, 9, 0, 0, 180, 181, 7, 15, 0, 0, 181, 182, 7, 7, 0, 0, 182, 183, 7, 18, 0, 0, 183, 50, 1, 0, 0, 0, 184, 185, 7, 17, 0, 0, 185, 186, 7, 15, 0, 0, 186, 187, 7, 9, 0, 0, 187, 188, 7, 15, 0, 0, 188, 189, 7, 0, 0, 0, 189, 190, 7, 0, 0, 0, 190, 52, 1, 0, 0, 0, 191, 192, 7, 5, 0, 0, 192, 193, 7, 10, 0, 0, 193, 194, 7, 19, 0, 0, 194, 201, 7, 3, 0, 0, 195, 196, 7, 20, 0, 0, 196, 197, 7, 15, 0, 0, 197, 198, 7, 0, 0, 0, 198, 199, 7, 9, 0, 0, 199, 201, 7, 3, 0, 0, 200, 191, 1, 0, 0, 0, 200, 195, 1, 0, 0, 0, 201, 54, 1, 0, 0, 0, 202, 203, 7, 21, 0, 0, 203, 56, 1, 0, 0, 0, 204, 206, 3, 55, 27, 0, 205, 204, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 208, 1, 0, 0, 0, 207, 209, 3, 71, 35, 0, 208, 207, 1, 0, 0, 0, 209, 210, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 210, 211, 1, 0, 0, 0, 211, 219, 1, 0, 0, 0, 212, 216, 5, 46, 0, 0, 213, 215, 3, 71, 35, 0, 214, 213, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 220, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 212, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 230, 1, 0, 0, 0, 221, 223, 7, 3, 0, 0, 222, 224, 3, 55, 27, 0, 223, 222, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 1, 0, 0, 0, 225, 227, 3, 71, 35, 0, 226, 225, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 231, 1, 0, 0, 0, 230, 221, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 253, 1, 0, 0, 0, 232, 234, 3, 55, 27, 0, 233, 232, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 237, 5, 46, 0, 0, 236, 238, 3, 71, 35, 0, 237, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 55, 27, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 71, 35, 0, 246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 
0, 0, 0, 251, 253, 1, 0, 0, 0, 252, 205, 1, 0, 0, 0, 252, 233, 1, 0, 0, 0, 253, 58, 1, 0, 0, 0, 254, 260, 5, 34, 0, 0, 255, 259, 8, 22, 0, 0, 256, 257, 5, 92, 0, 0, 257, 259, 9, 0, 0, 0, 258, 255, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 263, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 275, 5, 34, 0, 0, 264, 270, 5, 39, 0, 0, 265, 269, 8, 23, 0, 0, 266, 267, 5, 92, 0, 0, 267, 269, 9, 0, 0, 0, 268, 265, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 39, 0, 0, 274, 254, 1, 0, 0, 0, 274, 264, 1, 0, 0, 0, 275, 60, 1, 0, 0, 0, 276, 280, 7, 24, 0, 0, 277, 279, 7, 25, 0, 0, 278, 277, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 62, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 284, 5, 91, 0, 0, 284, 285, 5, 93, 0, 0, 285, 64, 1, 0, 0, 0, 286, 287, 5, 91, 0, 0, 287, 288, 5, 42, 0, 0, 288, 289, 5, 93, 0, 0, 289, 66, 1, 0, 0, 0, 290, 303, 3, 61, 30, 0, 291, 292, 5, 46, 0, 0, 292, 302, 3, 61, 30, 0, 293, 302, 3, 63, 31, 0, 294, 302, 3, 65, 32, 0, 295, 297, 5, 46, 0, 0, 296, 298, 3, 71, 35, 0, 297, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 302, 1, 0, 0, 0, 301, 291, 1, 0, 0, 0, 301, 293, 1, 0, 0, 0, 301, 294, 1, 0, 0, 0, 301, 295, 1, 0, 0, 0, 302, 305, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 68, 1, 0, 0, 0, 305, 303, 1, 0, 0, 0, 306, 308, 7, 26, 0, 0, 307, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 309, 310, 1, 0, 0, 0, 310, 311, 1, 0, 0, 0, 311, 312, 6, 34, 0, 0, 312, 70, 1, 0, 0, 0, 313, 314, 7, 27, 0, 0, 314, 72, 1, 0, 0, 0, 315, 317, 8, 28, 0, 0, 316, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 316, 1, 0, 0, 0, 318, 319, 1, 0, 0, 0, 319, 74, 1, 0, 0, 0, 29, 0, 88, 131, 148, 200, 205, 210, 216, 219, 223, 228, 230, 233, 239, 243, 248, 250, 252, 258, 260, 268, 270, 274, 280, 299, 301, 303, 309, 
318, 1, 6, 0, 0] \ No newline at end of file diff --git a/frontend/src/parser/FilterQueryLexer.ts b/frontend/src/parser/FilterQueryLexer.ts index fe4d66029230..26f468a81306 100644 --- a/frontend/src/parser/FilterQueryLexer.ts +++ b/frontend/src/parser/FilterQueryLexer.ts @@ -1,4 +1,4 @@ -// Generated from ../../../../grammar/FilterQuery.g4 by ANTLR 4.13.1 +// Generated from FilterQuery.g4 by ANTLR 4.13.1 // noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols import { ATN, @@ -100,7 +100,7 @@ export default class FilterQueryLexer extends Lexer { public get modeNames(): string[] { return FilterQueryLexer.modeNames; } - public static readonly _serializedATN: number[] = [4,0,32,314,6,-1,2,0, + public static readonly _serializedATN: number[] = [4,0,32,320,6,-1,2,0, 7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2,7,7,7,2,8,7,8,2,9, 7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13,2,14,7,14,2,15,7,15,2,16,7, 16,2,17,7,17,2,18,7,18,2,19,7,19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,23, @@ -122,94 +122,97 @@ export default class FilterQueryLexer extends Lexer { 3,28,253,8,28,1,29,1,29,1,29,1,29,5,29,259,8,29,10,29,12,29,262,9,29,1, 29,1,29,1,29,1,29,1,29,5,29,269,8,29,10,29,12,29,272,9,29,1,29,3,29,275, 8,29,1,30,1,30,5,30,279,8,30,10,30,12,30,282,9,30,1,31,1,31,1,31,1,32,1, - 32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,5,33,296,8,33,10,33,12,33,299,9,33, - 1,34,4,34,302,8,34,11,34,12,34,303,1,34,1,34,1,35,1,35,1,36,4,36,311,8, - 36,11,36,12,36,312,0,0,37,1,1,3,2,5,3,7,4,9,5,11,6,13,7,15,8,17,9,19,10, - 21,11,23,12,25,13,27,14,29,15,31,16,33,17,35,18,37,19,39,20,41,21,43,22, - 45,23,47,24,49,25,51,26,53,27,55,0,57,28,59,29,61,0,63,0,65,0,67,30,69, - 31,71,0,73,32,1,0,29,2,0,76,76,108,108,2,0,73,73,105,105,2,0,75,75,107, - 107,2,0,69,69,101,101,2,0,66,66,98,98,2,0,84,84,116,116,2,0,87,87,119,119, - 2,0,78,78,110,110,2,0,88,88,120,120,2,0,83,83,115,115,2,0,82,82,114,114, - 2,0,71,71,103,103,2,0,80,80,112,112,2,0,67,67,99,99,2,0,79,79,111,111,2, - 
0,65,65,97,97,2,0,68,68,100,100,2,0,72,72,104,104,2,0,89,89,121,121,2,0, - 85,85,117,117,2,0,70,70,102,102,2,0,43,43,45,45,2,0,34,34,92,92,2,0,39, - 39,92,92,4,0,36,36,65,90,95,95,97,122,6,0,36,36,45,45,47,58,65,90,95,95, - 97,122,3,0,9,10,13,13,32,32,1,0,48,57,8,0,9,10,13,13,32,34,39,41,44,44, - 60,62,91,91,93,93,336,0,1,1,0,0,0,0,3,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0, - 9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0,0,0,19,1,0, - 0,0,0,21,1,0,0,0,0,23,1,0,0,0,0,25,1,0,0,0,0,27,1,0,0,0,0,29,1,0,0,0,0, - 31,1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,0,0,37,1,0,0,0,0,39,1,0,0,0,0,41,1,0, - 0,0,0,43,1,0,0,0,0,45,1,0,0,0,0,47,1,0,0,0,0,49,1,0,0,0,0,51,1,0,0,0,0, - 53,1,0,0,0,0,57,1,0,0,0,0,59,1,0,0,0,0,67,1,0,0,0,0,69,1,0,0,0,0,73,1,0, - 0,0,1,75,1,0,0,0,3,77,1,0,0,0,5,79,1,0,0,0,7,81,1,0,0,0,9,83,1,0,0,0,11, - 88,1,0,0,0,13,90,1,0,0,0,15,93,1,0,0,0,17,96,1,0,0,0,19,98,1,0,0,0,21,101, - 1,0,0,0,23,103,1,0,0,0,25,106,1,0,0,0,27,111,1,0,0,0,29,117,1,0,0,0,31, - 125,1,0,0,0,33,133,1,0,0,0,35,140,1,0,0,0,37,150,1,0,0,0,39,153,1,0,0,0, - 41,157,1,0,0,0,43,161,1,0,0,0,45,164,1,0,0,0,47,173,1,0,0,0,49,177,1,0, - 0,0,51,184,1,0,0,0,53,200,1,0,0,0,55,202,1,0,0,0,57,252,1,0,0,0,59,274, - 1,0,0,0,61,276,1,0,0,0,63,283,1,0,0,0,65,286,1,0,0,0,67,290,1,0,0,0,69, - 301,1,0,0,0,71,307,1,0,0,0,73,310,1,0,0,0,75,76,5,40,0,0,76,2,1,0,0,0,77, - 78,5,41,0,0,78,4,1,0,0,0,79,80,5,91,0,0,80,6,1,0,0,0,81,82,5,93,0,0,82, - 8,1,0,0,0,83,84,5,44,0,0,84,10,1,0,0,0,85,89,5,61,0,0,86,87,5,61,0,0,87, - 89,5,61,0,0,88,85,1,0,0,0,88,86,1,0,0,0,89,12,1,0,0,0,90,91,5,33,0,0,91, - 92,5,61,0,0,92,14,1,0,0,0,93,94,5,60,0,0,94,95,5,62,0,0,95,16,1,0,0,0,96, - 97,5,60,0,0,97,18,1,0,0,0,98,99,5,60,0,0,99,100,5,61,0,0,100,20,1,0,0,0, - 101,102,5,62,0,0,102,22,1,0,0,0,103,104,5,62,0,0,104,105,5,61,0,0,105,24, - 1,0,0,0,106,107,7,0,0,0,107,108,7,1,0,0,108,109,7,2,0,0,109,110,7,3,0,0, - 110,26,1,0,0,0,111,112,7,1,0,0,112,113,7,0,0,0,113,114,7,1,0,0,114,115, - 
7,2,0,0,115,116,7,3,0,0,116,28,1,0,0,0,117,118,7,4,0,0,118,119,7,3,0,0, - 119,120,7,5,0,0,120,121,7,6,0,0,121,122,7,3,0,0,122,123,7,3,0,0,123,124, - 7,7,0,0,124,30,1,0,0,0,125,126,7,3,0,0,126,127,7,8,0,0,127,128,7,1,0,0, - 128,129,7,9,0,0,129,131,7,5,0,0,130,132,7,9,0,0,131,130,1,0,0,0,131,132, - 1,0,0,0,132,32,1,0,0,0,133,134,7,10,0,0,134,135,7,3,0,0,135,136,7,11,0, - 0,136,137,7,3,0,0,137,138,7,8,0,0,138,139,7,12,0,0,139,34,1,0,0,0,140,141, - 7,13,0,0,141,142,7,14,0,0,142,143,7,7,0,0,143,144,7,5,0,0,144,145,7,15, - 0,0,145,146,7,1,0,0,146,148,7,7,0,0,147,149,7,9,0,0,148,147,1,0,0,0,148, - 149,1,0,0,0,149,36,1,0,0,0,150,151,7,1,0,0,151,152,7,7,0,0,152,38,1,0,0, - 0,153,154,7,7,0,0,154,155,7,14,0,0,155,156,7,5,0,0,156,40,1,0,0,0,157,158, - 7,15,0,0,158,159,7,7,0,0,159,160,7,16,0,0,160,42,1,0,0,0,161,162,7,14,0, - 0,162,163,7,10,0,0,163,44,1,0,0,0,164,165,7,17,0,0,165,166,7,15,0,0,166, - 167,7,9,0,0,167,168,7,5,0,0,168,169,7,14,0,0,169,170,7,2,0,0,170,171,7, - 3,0,0,171,172,7,7,0,0,172,46,1,0,0,0,173,174,7,17,0,0,174,175,7,15,0,0, - 175,176,7,9,0,0,176,48,1,0,0,0,177,178,7,17,0,0,178,179,7,15,0,0,179,180, - 7,9,0,0,180,181,7,15,0,0,181,182,7,7,0,0,182,183,7,18,0,0,183,50,1,0,0, - 0,184,185,7,17,0,0,185,186,7,15,0,0,186,187,7,9,0,0,187,188,7,15,0,0,188, - 189,7,0,0,0,189,190,7,0,0,0,190,52,1,0,0,0,191,192,7,5,0,0,192,193,7,10, - 0,0,193,194,7,19,0,0,194,201,7,3,0,0,195,196,7,20,0,0,196,197,7,15,0,0, - 197,198,7,0,0,0,198,199,7,9,0,0,199,201,7,3,0,0,200,191,1,0,0,0,200,195, - 1,0,0,0,201,54,1,0,0,0,202,203,7,21,0,0,203,56,1,0,0,0,204,206,3,55,27, - 0,205,204,1,0,0,0,205,206,1,0,0,0,206,208,1,0,0,0,207,209,3,71,35,0,208, - 207,1,0,0,0,209,210,1,0,0,0,210,208,1,0,0,0,210,211,1,0,0,0,211,219,1,0, - 0,0,212,216,5,46,0,0,213,215,3,71,35,0,214,213,1,0,0,0,215,218,1,0,0,0, - 216,214,1,0,0,0,216,217,1,0,0,0,217,220,1,0,0,0,218,216,1,0,0,0,219,212, - 1,0,0,0,219,220,1,0,0,0,220,230,1,0,0,0,221,223,7,3,0,0,222,224,3,55,27, - 
0,223,222,1,0,0,0,223,224,1,0,0,0,224,226,1,0,0,0,225,227,3,71,35,0,226, - 225,1,0,0,0,227,228,1,0,0,0,228,226,1,0,0,0,228,229,1,0,0,0,229,231,1,0, - 0,0,230,221,1,0,0,0,230,231,1,0,0,0,231,253,1,0,0,0,232,234,3,55,27,0,233, - 232,1,0,0,0,233,234,1,0,0,0,234,235,1,0,0,0,235,237,5,46,0,0,236,238,3, - 71,35,0,237,236,1,0,0,0,238,239,1,0,0,0,239,237,1,0,0,0,239,240,1,0,0,0, - 240,250,1,0,0,0,241,243,7,3,0,0,242,244,3,55,27,0,243,242,1,0,0,0,243,244, - 1,0,0,0,244,246,1,0,0,0,245,247,3,71,35,0,246,245,1,0,0,0,247,248,1,0,0, - 0,248,246,1,0,0,0,248,249,1,0,0,0,249,251,1,0,0,0,250,241,1,0,0,0,250,251, - 1,0,0,0,251,253,1,0,0,0,252,205,1,0,0,0,252,233,1,0,0,0,253,58,1,0,0,0, - 254,260,5,34,0,0,255,259,8,22,0,0,256,257,5,92,0,0,257,259,9,0,0,0,258, - 255,1,0,0,0,258,256,1,0,0,0,259,262,1,0,0,0,260,258,1,0,0,0,260,261,1,0, - 0,0,261,263,1,0,0,0,262,260,1,0,0,0,263,275,5,34,0,0,264,270,5,39,0,0,265, - 269,8,23,0,0,266,267,5,92,0,0,267,269,9,0,0,0,268,265,1,0,0,0,268,266,1, - 0,0,0,269,272,1,0,0,0,270,268,1,0,0,0,270,271,1,0,0,0,271,273,1,0,0,0,272, - 270,1,0,0,0,273,275,5,39,0,0,274,254,1,0,0,0,274,264,1,0,0,0,275,60,1,0, - 0,0,276,280,7,24,0,0,277,279,7,25,0,0,278,277,1,0,0,0,279,282,1,0,0,0,280, - 278,1,0,0,0,280,281,1,0,0,0,281,62,1,0,0,0,282,280,1,0,0,0,283,284,5,91, - 0,0,284,285,5,93,0,0,285,64,1,0,0,0,286,287,5,91,0,0,287,288,5,42,0,0,288, - 289,5,93,0,0,289,66,1,0,0,0,290,297,3,61,30,0,291,292,5,46,0,0,292,296, - 3,61,30,0,293,296,3,63,31,0,294,296,3,65,32,0,295,291,1,0,0,0,295,293,1, - 0,0,0,295,294,1,0,0,0,296,299,1,0,0,0,297,295,1,0,0,0,297,298,1,0,0,0,298, - 68,1,0,0,0,299,297,1,0,0,0,300,302,7,26,0,0,301,300,1,0,0,0,302,303,1,0, - 0,0,303,301,1,0,0,0,303,304,1,0,0,0,304,305,1,0,0,0,305,306,6,34,0,0,306, - 70,1,0,0,0,307,308,7,27,0,0,308,72,1,0,0,0,309,311,8,28,0,0,310,309,1,0, - 0,0,311,312,1,0,0,0,312,310,1,0,0,0,312,313,1,0,0,0,313,74,1,0,0,0,28,0, - 88,131,148,200,205,210,216,219,223,228,230,233,239,243,248,250,252,258, - 
260,268,270,274,280,295,297,303,312,1,6,0,0]; + 32,1,32,1,32,1,33,1,33,1,33,1,33,1,33,1,33,1,33,4,33,298,8,33,11,33,12, + 33,299,5,33,302,8,33,10,33,12,33,305,9,33,1,34,4,34,308,8,34,11,34,12,34, + 309,1,34,1,34,1,35,1,35,1,36,4,36,317,8,36,11,36,12,36,318,0,0,37,1,1,3, + 2,5,3,7,4,9,5,11,6,13,7,15,8,17,9,19,10,21,11,23,12,25,13,27,14,29,15,31, + 16,33,17,35,18,37,19,39,20,41,21,43,22,45,23,47,24,49,25,51,26,53,27,55, + 0,57,28,59,29,61,0,63,0,65,0,67,30,69,31,71,0,73,32,1,0,29,2,0,76,76,108, + 108,2,0,73,73,105,105,2,0,75,75,107,107,2,0,69,69,101,101,2,0,66,66,98, + 98,2,0,84,84,116,116,2,0,87,87,119,119,2,0,78,78,110,110,2,0,88,88,120, + 120,2,0,83,83,115,115,2,0,82,82,114,114,2,0,71,71,103,103,2,0,80,80,112, + 112,2,0,67,67,99,99,2,0,79,79,111,111,2,0,65,65,97,97,2,0,68,68,100,100, + 2,0,72,72,104,104,2,0,89,89,121,121,2,0,85,85,117,117,2,0,70,70,102,102, + 2,0,43,43,45,45,2,0,34,34,92,92,2,0,39,39,92,92,4,0,35,36,64,90,95,95,97, + 123,7,0,35,36,45,45,47,58,64,90,95,95,97,123,125,125,3,0,9,10,13,13,32, + 32,1,0,48,57,8,0,9,10,13,13,32,34,39,41,44,44,60,62,91,91,93,93,344,0,1, + 1,0,0,0,0,3,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0,9,1,0,0,0,0,11,1,0,0,0,0, + 13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0,0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1,0, + 0,0,0,25,1,0,0,0,0,27,1,0,0,0,0,29,1,0,0,0,0,31,1,0,0,0,0,33,1,0,0,0,0, + 35,1,0,0,0,0,37,1,0,0,0,0,39,1,0,0,0,0,41,1,0,0,0,0,43,1,0,0,0,0,45,1,0, + 0,0,0,47,1,0,0,0,0,49,1,0,0,0,0,51,1,0,0,0,0,53,1,0,0,0,0,57,1,0,0,0,0, + 59,1,0,0,0,0,67,1,0,0,0,0,69,1,0,0,0,0,73,1,0,0,0,1,75,1,0,0,0,3,77,1,0, + 0,0,5,79,1,0,0,0,7,81,1,0,0,0,9,83,1,0,0,0,11,88,1,0,0,0,13,90,1,0,0,0, + 15,93,1,0,0,0,17,96,1,0,0,0,19,98,1,0,0,0,21,101,1,0,0,0,23,103,1,0,0,0, + 25,106,1,0,0,0,27,111,1,0,0,0,29,117,1,0,0,0,31,125,1,0,0,0,33,133,1,0, + 0,0,35,140,1,0,0,0,37,150,1,0,0,0,39,153,1,0,0,0,41,157,1,0,0,0,43,161, + 1,0,0,0,45,164,1,0,0,0,47,173,1,0,0,0,49,177,1,0,0,0,51,184,1,0,0,0,53, + 200,1,0,0,0,55,202,1,0,0,0,57,252,1,0,0,0,59,274,1,0,0,0,61,276,1,0,0,0, + 
63,283,1,0,0,0,65,286,1,0,0,0,67,290,1,0,0,0,69,307,1,0,0,0,71,313,1,0, + 0,0,73,316,1,0,0,0,75,76,5,40,0,0,76,2,1,0,0,0,77,78,5,41,0,0,78,4,1,0, + 0,0,79,80,5,91,0,0,80,6,1,0,0,0,81,82,5,93,0,0,82,8,1,0,0,0,83,84,5,44, + 0,0,84,10,1,0,0,0,85,89,5,61,0,0,86,87,5,61,0,0,87,89,5,61,0,0,88,85,1, + 0,0,0,88,86,1,0,0,0,89,12,1,0,0,0,90,91,5,33,0,0,91,92,5,61,0,0,92,14,1, + 0,0,0,93,94,5,60,0,0,94,95,5,62,0,0,95,16,1,0,0,0,96,97,5,60,0,0,97,18, + 1,0,0,0,98,99,5,60,0,0,99,100,5,61,0,0,100,20,1,0,0,0,101,102,5,62,0,0, + 102,22,1,0,0,0,103,104,5,62,0,0,104,105,5,61,0,0,105,24,1,0,0,0,106,107, + 7,0,0,0,107,108,7,1,0,0,108,109,7,2,0,0,109,110,7,3,0,0,110,26,1,0,0,0, + 111,112,7,1,0,0,112,113,7,0,0,0,113,114,7,1,0,0,114,115,7,2,0,0,115,116, + 7,3,0,0,116,28,1,0,0,0,117,118,7,4,0,0,118,119,7,3,0,0,119,120,7,5,0,0, + 120,121,7,6,0,0,121,122,7,3,0,0,122,123,7,3,0,0,123,124,7,7,0,0,124,30, + 1,0,0,0,125,126,7,3,0,0,126,127,7,8,0,0,127,128,7,1,0,0,128,129,7,9,0,0, + 129,131,7,5,0,0,130,132,7,9,0,0,131,130,1,0,0,0,131,132,1,0,0,0,132,32, + 1,0,0,0,133,134,7,10,0,0,134,135,7,3,0,0,135,136,7,11,0,0,136,137,7,3,0, + 0,137,138,7,8,0,0,138,139,7,12,0,0,139,34,1,0,0,0,140,141,7,13,0,0,141, + 142,7,14,0,0,142,143,7,7,0,0,143,144,7,5,0,0,144,145,7,15,0,0,145,146,7, + 1,0,0,146,148,7,7,0,0,147,149,7,9,0,0,148,147,1,0,0,0,148,149,1,0,0,0,149, + 36,1,0,0,0,150,151,7,1,0,0,151,152,7,7,0,0,152,38,1,0,0,0,153,154,7,7,0, + 0,154,155,7,14,0,0,155,156,7,5,0,0,156,40,1,0,0,0,157,158,7,15,0,0,158, + 159,7,7,0,0,159,160,7,16,0,0,160,42,1,0,0,0,161,162,7,14,0,0,162,163,7, + 10,0,0,163,44,1,0,0,0,164,165,7,17,0,0,165,166,7,15,0,0,166,167,7,9,0,0, + 167,168,7,5,0,0,168,169,7,14,0,0,169,170,7,2,0,0,170,171,7,3,0,0,171,172, + 7,7,0,0,172,46,1,0,0,0,173,174,7,17,0,0,174,175,7,15,0,0,175,176,7,9,0, + 0,176,48,1,0,0,0,177,178,7,17,0,0,178,179,7,15,0,0,179,180,7,9,0,0,180, + 181,7,15,0,0,181,182,7,7,0,0,182,183,7,18,0,0,183,50,1,0,0,0,184,185,7, + 
17,0,0,185,186,7,15,0,0,186,187,7,9,0,0,187,188,7,15,0,0,188,189,7,0,0, + 0,189,190,7,0,0,0,190,52,1,0,0,0,191,192,7,5,0,0,192,193,7,10,0,0,193,194, + 7,19,0,0,194,201,7,3,0,0,195,196,7,20,0,0,196,197,7,15,0,0,197,198,7,0, + 0,0,198,199,7,9,0,0,199,201,7,3,0,0,200,191,1,0,0,0,200,195,1,0,0,0,201, + 54,1,0,0,0,202,203,7,21,0,0,203,56,1,0,0,0,204,206,3,55,27,0,205,204,1, + 0,0,0,205,206,1,0,0,0,206,208,1,0,0,0,207,209,3,71,35,0,208,207,1,0,0,0, + 209,210,1,0,0,0,210,208,1,0,0,0,210,211,1,0,0,0,211,219,1,0,0,0,212,216, + 5,46,0,0,213,215,3,71,35,0,214,213,1,0,0,0,215,218,1,0,0,0,216,214,1,0, + 0,0,216,217,1,0,0,0,217,220,1,0,0,0,218,216,1,0,0,0,219,212,1,0,0,0,219, + 220,1,0,0,0,220,230,1,0,0,0,221,223,7,3,0,0,222,224,3,55,27,0,223,222,1, + 0,0,0,223,224,1,0,0,0,224,226,1,0,0,0,225,227,3,71,35,0,226,225,1,0,0,0, + 227,228,1,0,0,0,228,226,1,0,0,0,228,229,1,0,0,0,229,231,1,0,0,0,230,221, + 1,0,0,0,230,231,1,0,0,0,231,253,1,0,0,0,232,234,3,55,27,0,233,232,1,0,0, + 0,233,234,1,0,0,0,234,235,1,0,0,0,235,237,5,46,0,0,236,238,3,71,35,0,237, + 236,1,0,0,0,238,239,1,0,0,0,239,237,1,0,0,0,239,240,1,0,0,0,240,250,1,0, + 0,0,241,243,7,3,0,0,242,244,3,55,27,0,243,242,1,0,0,0,243,244,1,0,0,0,244, + 246,1,0,0,0,245,247,3,71,35,0,246,245,1,0,0,0,247,248,1,0,0,0,248,246,1, + 0,0,0,248,249,1,0,0,0,249,251,1,0,0,0,250,241,1,0,0,0,250,251,1,0,0,0,251, + 253,1,0,0,0,252,205,1,0,0,0,252,233,1,0,0,0,253,58,1,0,0,0,254,260,5,34, + 0,0,255,259,8,22,0,0,256,257,5,92,0,0,257,259,9,0,0,0,258,255,1,0,0,0,258, + 256,1,0,0,0,259,262,1,0,0,0,260,258,1,0,0,0,260,261,1,0,0,0,261,263,1,0, + 0,0,262,260,1,0,0,0,263,275,5,34,0,0,264,270,5,39,0,0,265,269,8,23,0,0, + 266,267,5,92,0,0,267,269,9,0,0,0,268,265,1,0,0,0,268,266,1,0,0,0,269,272, + 1,0,0,0,270,268,1,0,0,0,270,271,1,0,0,0,271,273,1,0,0,0,272,270,1,0,0,0, + 273,275,5,39,0,0,274,254,1,0,0,0,274,264,1,0,0,0,275,60,1,0,0,0,276,280, + 7,24,0,0,277,279,7,25,0,0,278,277,1,0,0,0,279,282,1,0,0,0,280,278,1,0,0, + 
0,280,281,1,0,0,0,281,62,1,0,0,0,282,280,1,0,0,0,283,284,5,91,0,0,284,285, + 5,93,0,0,285,64,1,0,0,0,286,287,5,91,0,0,287,288,5,42,0,0,288,289,5,93, + 0,0,289,66,1,0,0,0,290,303,3,61,30,0,291,292,5,46,0,0,292,302,3,61,30,0, + 293,302,3,63,31,0,294,302,3,65,32,0,295,297,5,46,0,0,296,298,3,71,35,0, + 297,296,1,0,0,0,298,299,1,0,0,0,299,297,1,0,0,0,299,300,1,0,0,0,300,302, + 1,0,0,0,301,291,1,0,0,0,301,293,1,0,0,0,301,294,1,0,0,0,301,295,1,0,0,0, + 302,305,1,0,0,0,303,301,1,0,0,0,303,304,1,0,0,0,304,68,1,0,0,0,305,303, + 1,0,0,0,306,308,7,26,0,0,307,306,1,0,0,0,308,309,1,0,0,0,309,307,1,0,0, + 0,309,310,1,0,0,0,310,311,1,0,0,0,311,312,6,34,0,0,312,70,1,0,0,0,313,314, + 7,27,0,0,314,72,1,0,0,0,315,317,8,28,0,0,316,315,1,0,0,0,317,318,1,0,0, + 0,318,316,1,0,0,0,318,319,1,0,0,0,319,74,1,0,0,0,29,0,88,131,148,200,205, + 210,216,219,223,228,230,233,239,243,248,250,252,258,260,268,270,274,280, + 299,301,303,309,318,1,6,0,0]; private static __ATN: ATN; public static get _ATN(): ATN { diff --git a/frontend/src/parser/FilterQueryListener.ts b/frontend/src/parser/FilterQueryListener.ts index a05a158de3a9..8fd9bf9c28f8 100644 --- a/frontend/src/parser/FilterQueryListener.ts +++ b/frontend/src/parser/FilterQueryListener.ts @@ -1,4 +1,4 @@ -// Generated from ../../../../grammar/FilterQuery.g4 by ANTLR 4.13.1 +// Generated from FilterQuery.g4 by ANTLR 4.13.1 import {ParseTreeListener} from "antlr4"; diff --git a/frontend/src/parser/FilterQueryParser.ts b/frontend/src/parser/FilterQueryParser.ts index d9c11a1646f9..bb66f2ef58fc 100644 --- a/frontend/src/parser/FilterQueryParser.ts +++ b/frontend/src/parser/FilterQueryParser.ts @@ -1,4 +1,4 @@ -// Generated from ../../../../grammar/FilterQuery.g4 by ANTLR 4.13.1 +// Generated from FilterQuery.g4 by ANTLR 4.13.1 // noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols import { diff --git a/frontend/src/parser/FilterQueryVisitor.ts b/frontend/src/parser/FilterQueryVisitor.ts index 9ffa568cf36e..50578ecb7a7f 100644 
--- a/frontend/src/parser/FilterQueryVisitor.ts +++ b/frontend/src/parser/FilterQueryVisitor.ts @@ -1,4 +1,4 @@ -// Generated from ../../../../grammar/FilterQuery.g4 by ANTLR 4.13.1 +// Generated from FilterQuery.g4 by ANTLR 4.13.1 import {ParseTreeVisitor} from 'antlr4'; diff --git a/grammar/FilterQuery.g4 b/grammar/FilterQuery.g4 index 18174ab01e41..405090510eba 100644 --- a/grammar/FilterQuery.g4 +++ b/grammar/FilterQuery.g4 @@ -207,12 +207,12 @@ QUOTED_TEXT ) ; -fragment SEGMENT : [a-zA-Z$_] [a-zA-Z0-9$_:\-/]* ; +fragment SEGMENT : [a-zA-Z$_@{#] [a-zA-Z0-9$_@#{}:\-/]* ; fragment EMPTY_BRACKS : '[' ']' ; fragment OLD_JSON_BRACKS: '[' '*' ']'; KEY - : SEGMENT ( '.' SEGMENT | EMPTY_BRACKS | OLD_JSON_BRACKS)* + : SEGMENT ( '.' SEGMENT | EMPTY_BRACKS | OLD_JSON_BRACKS | '.' DIGIT+)* ; // Ignore whitespace diff --git a/pkg/parser/grammar/FilterQueryLexer.interp b/pkg/parser/grammar/FilterQueryLexer.interp index 6f3d2d7e1697..bb681c909134 100644 --- a/pkg/parser/grammar/FilterQueryLexer.interp +++ b/pkg/parser/grammar/FilterQueryLexer.interp @@ -115,4 +115,4 @@ mode names: DEFAULT_MODE atn: -[4, 0, 32, 314, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 89, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 
15, 132, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 149, 8, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 201, 8, 26, 1, 27, 1, 27, 1, 28, 3, 28, 206, 8, 28, 1, 28, 4, 28, 209, 8, 28, 11, 28, 12, 28, 210, 1, 28, 1, 28, 5, 28, 215, 8, 28, 10, 28, 12, 28, 218, 9, 28, 3, 28, 220, 8, 28, 1, 28, 1, 28, 3, 28, 224, 8, 28, 1, 28, 4, 28, 227, 8, 28, 11, 28, 12, 28, 228, 3, 28, 231, 8, 28, 1, 28, 3, 28, 234, 8, 28, 1, 28, 1, 28, 4, 28, 238, 8, 28, 11, 28, 12, 28, 239, 1, 28, 1, 28, 3, 28, 244, 8, 28, 1, 28, 4, 28, 247, 8, 28, 11, 28, 12, 28, 248, 3, 28, 251, 8, 28, 3, 28, 253, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 259, 8, 29, 10, 29, 12, 29, 262, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 269, 8, 29, 10, 29, 12, 29, 272, 9, 29, 1, 29, 3, 29, 275, 8, 29, 1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 296, 8, 33, 10, 33, 12, 33, 299, 9, 33, 1, 34, 4, 34, 302, 8, 34, 11, 34, 12, 34, 303, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 4, 36, 311, 8, 36, 11, 36, 12, 36, 312, 0, 0, 37, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 0, 57, 28, 59, 29, 61, 0, 63, 0, 65, 0, 67, 30, 69, 31, 71, 0, 73, 32, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116, 2, 0, 87, 87, 119, 119, 2, 0, 78, 78, 110, 110, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 
115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 79, 79, 111, 111, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 4, 0, 36, 36, 65, 90, 95, 95, 97, 122, 6, 0, 36, 36, 45, 45, 47, 58, 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 336, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 1, 75, 1, 0, 0, 0, 3, 77, 1, 0, 0, 0, 5, 79, 1, 0, 0, 0, 7, 81, 1, 0, 0, 0, 9, 83, 1, 0, 0, 0, 11, 88, 1, 0, 0, 0, 13, 90, 1, 0, 0, 0, 15, 93, 1, 0, 0, 0, 17, 96, 1, 0, 0, 0, 19, 98, 1, 0, 0, 0, 21, 101, 1, 0, 0, 0, 23, 103, 1, 0, 0, 0, 25, 106, 1, 0, 0, 0, 27, 111, 1, 0, 0, 0, 29, 117, 1, 0, 0, 0, 31, 125, 1, 0, 0, 0, 33, 133, 1, 0, 0, 0, 35, 140, 1, 0, 0, 0, 37, 150, 1, 0, 0, 0, 39, 153, 1, 0, 0, 0, 41, 157, 1, 0, 0, 0, 43, 161, 1, 0, 0, 0, 45, 164, 1, 0, 0, 0, 47, 173, 1, 0, 0, 0, 49, 177, 1, 0, 0, 0, 51, 184, 1, 0, 0, 0, 53, 200, 1, 0, 0, 0, 55, 202, 1, 0, 0, 0, 57, 252, 1, 0, 0, 0, 59, 274, 1, 0, 0, 0, 61, 276, 1, 0, 0, 0, 63, 283, 1, 0, 0, 0, 65, 286, 1, 0, 0, 0, 67, 290, 1, 0, 0, 0, 69, 301, 1, 0, 0, 0, 71, 307, 1, 0, 0, 0, 73, 310, 1, 0, 0, 0, 75, 76, 5, 40, 0, 0, 76, 2, 1, 0, 0, 0, 77, 78, 5, 41, 0, 0, 78, 4, 1, 0, 0, 0, 79, 80, 5, 
91, 0, 0, 80, 6, 1, 0, 0, 0, 81, 82, 5, 93, 0, 0, 82, 8, 1, 0, 0, 0, 83, 84, 5, 44, 0, 0, 84, 10, 1, 0, 0, 0, 85, 89, 5, 61, 0, 0, 86, 87, 5, 61, 0, 0, 87, 89, 5, 61, 0, 0, 88, 85, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, 89, 12, 1, 0, 0, 0, 90, 91, 5, 33, 0, 0, 91, 92, 5, 61, 0, 0, 92, 14, 1, 0, 0, 0, 93, 94, 5, 60, 0, 0, 94, 95, 5, 62, 0, 0, 95, 16, 1, 0, 0, 0, 96, 97, 5, 60, 0, 0, 97, 18, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 100, 5, 61, 0, 0, 100, 20, 1, 0, 0, 0, 101, 102, 5, 62, 0, 0, 102, 22, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104, 105, 5, 61, 0, 0, 105, 24, 1, 0, 0, 0, 106, 107, 7, 0, 0, 0, 107, 108, 7, 1, 0, 0, 108, 109, 7, 2, 0, 0, 109, 110, 7, 3, 0, 0, 110, 26, 1, 0, 0, 0, 111, 112, 7, 1, 0, 0, 112, 113, 7, 0, 0, 0, 113, 114, 7, 1, 0, 0, 114, 115, 7, 2, 0, 0, 115, 116, 7, 3, 0, 0, 116, 28, 1, 0, 0, 0, 117, 118, 7, 4, 0, 0, 118, 119, 7, 3, 0, 0, 119, 120, 7, 5, 0, 0, 120, 121, 7, 6, 0, 0, 121, 122, 7, 3, 0, 0, 122, 123, 7, 3, 0, 0, 123, 124, 7, 7, 0, 0, 124, 30, 1, 0, 0, 0, 125, 126, 7, 3, 0, 0, 126, 127, 7, 8, 0, 0, 127, 128, 7, 1, 0, 0, 128, 129, 7, 9, 0, 0, 129, 131, 7, 5, 0, 0, 130, 132, 7, 9, 0, 0, 131, 130, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 32, 1, 0, 0, 0, 133, 134, 7, 10, 0, 0, 134, 135, 7, 3, 0, 0, 135, 136, 7, 11, 0, 0, 136, 137, 7, 3, 0, 0, 137, 138, 7, 8, 0, 0, 138, 139, 7, 12, 0, 0, 139, 34, 1, 0, 0, 0, 140, 141, 7, 13, 0, 0, 141, 142, 7, 14, 0, 0, 142, 143, 7, 7, 0, 0, 143, 144, 7, 5, 0, 0, 144, 145, 7, 15, 0, 0, 145, 146, 7, 1, 0, 0, 146, 148, 7, 7, 0, 0, 147, 149, 7, 9, 0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 36, 1, 0, 0, 0, 150, 151, 7, 1, 0, 0, 151, 152, 7, 7, 0, 0, 152, 38, 1, 0, 0, 0, 153, 154, 7, 7, 0, 0, 154, 155, 7, 14, 0, 0, 155, 156, 7, 5, 0, 0, 156, 40, 1, 0, 0, 0, 157, 158, 7, 15, 0, 0, 158, 159, 7, 7, 0, 0, 159, 160, 7, 16, 0, 0, 160, 42, 1, 0, 0, 0, 161, 162, 7, 14, 0, 0, 162, 163, 7, 10, 0, 0, 163, 44, 1, 0, 0, 0, 164, 165, 7, 17, 0, 0, 165, 166, 7, 15, 0, 0, 166, 167, 7, 9, 0, 0, 167, 168, 7, 5, 0, 0, 168, 169, 7, 
14, 0, 0, 169, 170, 7, 2, 0, 0, 170, 171, 7, 3, 0, 0, 171, 172, 7, 7, 0, 0, 172, 46, 1, 0, 0, 0, 173, 174, 7, 17, 0, 0, 174, 175, 7, 15, 0, 0, 175, 176, 7, 9, 0, 0, 176, 48, 1, 0, 0, 0, 177, 178, 7, 17, 0, 0, 178, 179, 7, 15, 0, 0, 179, 180, 7, 9, 0, 0, 180, 181, 7, 15, 0, 0, 181, 182, 7, 7, 0, 0, 182, 183, 7, 18, 0, 0, 183, 50, 1, 0, 0, 0, 184, 185, 7, 17, 0, 0, 185, 186, 7, 15, 0, 0, 186, 187, 7, 9, 0, 0, 187, 188, 7, 15, 0, 0, 188, 189, 7, 0, 0, 0, 189, 190, 7, 0, 0, 0, 190, 52, 1, 0, 0, 0, 191, 192, 7, 5, 0, 0, 192, 193, 7, 10, 0, 0, 193, 194, 7, 19, 0, 0, 194, 201, 7, 3, 0, 0, 195, 196, 7, 20, 0, 0, 196, 197, 7, 15, 0, 0, 197, 198, 7, 0, 0, 0, 198, 199, 7, 9, 0, 0, 199, 201, 7, 3, 0, 0, 200, 191, 1, 0, 0, 0, 200, 195, 1, 0, 0, 0, 201, 54, 1, 0, 0, 0, 202, 203, 7, 21, 0, 0, 203, 56, 1, 0, 0, 0, 204, 206, 3, 55, 27, 0, 205, 204, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 208, 1, 0, 0, 0, 207, 209, 3, 71, 35, 0, 208, 207, 1, 0, 0, 0, 209, 210, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 210, 211, 1, 0, 0, 0, 211, 219, 1, 0, 0, 0, 212, 216, 5, 46, 0, 0, 213, 215, 3, 71, 35, 0, 214, 213, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 220, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 212, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 230, 1, 0, 0, 0, 221, 223, 7, 3, 0, 0, 222, 224, 3, 55, 27, 0, 223, 222, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 1, 0, 0, 0, 225, 227, 3, 71, 35, 0, 226, 225, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 231, 1, 0, 0, 0, 230, 221, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 253, 1, 0, 0, 0, 232, 234, 3, 55, 27, 0, 233, 232, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 237, 5, 46, 0, 0, 236, 238, 3, 71, 35, 0, 237, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 55, 27, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 71, 35, 0, 246, 245, 1, 0, 0, 0, 
247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 253, 1, 0, 0, 0, 252, 205, 1, 0, 0, 0, 252, 233, 1, 0, 0, 0, 253, 58, 1, 0, 0, 0, 254, 260, 5, 34, 0, 0, 255, 259, 8, 22, 0, 0, 256, 257, 5, 92, 0, 0, 257, 259, 9, 0, 0, 0, 258, 255, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 263, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 275, 5, 34, 0, 0, 264, 270, 5, 39, 0, 0, 265, 269, 8, 23, 0, 0, 266, 267, 5, 92, 0, 0, 267, 269, 9, 0, 0, 0, 268, 265, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 39, 0, 0, 274, 254, 1, 0, 0, 0, 274, 264, 1, 0, 0, 0, 275, 60, 1, 0, 0, 0, 276, 280, 7, 24, 0, 0, 277, 279, 7, 25, 0, 0, 278, 277, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 62, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 284, 5, 91, 0, 0, 284, 285, 5, 93, 0, 0, 285, 64, 1, 0, 0, 0, 286, 287, 5, 91, 0, 0, 287, 288, 5, 42, 0, 0, 288, 289, 5, 93, 0, 0, 289, 66, 1, 0, 0, 0, 290, 297, 3, 61, 30, 0, 291, 292, 5, 46, 0, 0, 292, 296, 3, 61, 30, 0, 293, 296, 3, 63, 31, 0, 294, 296, 3, 65, 32, 0, 295, 291, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 295, 294, 1, 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 68, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 300, 302, 7, 26, 0, 0, 301, 300, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306, 6, 34, 0, 0, 306, 70, 1, 0, 0, 0, 307, 308, 7, 27, 0, 0, 308, 72, 1, 0, 0, 0, 309, 311, 8, 28, 0, 0, 310, 309, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 310, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 313, 74, 1, 0, 0, 0, 28, 0, 88, 131, 148, 200, 205, 210, 216, 219, 223, 228, 230, 233, 239, 243, 248, 250, 252, 258, 260, 268, 270, 274, 280, 295, 297, 303, 312, 1, 6, 0, 0] \ No newline at end of file +[4, 0, 32, 320, 
6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 3, 5, 89, 8, 5, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 132, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 149, 8, 17, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 3, 26, 201, 8, 26, 1, 27, 1, 27, 1, 28, 3, 28, 206, 8, 28, 1, 28, 4, 28, 209, 8, 28, 11, 28, 12, 28, 210, 1, 28, 1, 28, 5, 28, 215, 8, 28, 10, 28, 12, 28, 218, 9, 28, 3, 28, 220, 8, 28, 1, 28, 1, 28, 3, 28, 224, 8, 28, 1, 28, 4, 28, 227, 8, 28, 11, 28, 12, 28, 228, 3, 28, 231, 8, 28, 1, 28, 3, 28, 234, 8, 28, 1, 28, 1, 28, 4, 28, 238, 8, 28, 11, 28, 12, 28, 239, 1, 28, 1, 28, 3, 28, 244, 8, 28, 1, 28, 4, 28, 247, 8, 28, 11, 28, 12, 28, 248, 3, 28, 251, 8, 28, 3, 28, 253, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 259, 8, 29, 10, 29, 12, 29, 262, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 269, 8, 29, 10, 29, 12, 29, 
272, 9, 29, 1, 29, 3, 29, 275, 8, 29, 1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 4, 33, 298, 8, 33, 11, 33, 12, 33, 299, 5, 33, 302, 8, 33, 10, 33, 12, 33, 305, 9, 33, 1, 34, 4, 34, 308, 8, 34, 11, 34, 12, 34, 309, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 4, 36, 317, 8, 36, 11, 36, 12, 36, 318, 0, 0, 37, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 0, 57, 28, 59, 29, 61, 0, 63, 0, 65, 0, 67, 30, 69, 31, 71, 0, 73, 32, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116, 2, 0, 87, 87, 119, 119, 2, 0, 78, 78, 110, 110, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 79, 79, 111, 111, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 4, 0, 35, 36, 64, 90, 95, 95, 97, 123, 7, 0, 35, 36, 45, 45, 47, 58, 64, 90, 95, 95, 97, 123, 125, 125, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 344, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 
57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 1, 75, 1, 0, 0, 0, 3, 77, 1, 0, 0, 0, 5, 79, 1, 0, 0, 0, 7, 81, 1, 0, 0, 0, 9, 83, 1, 0, 0, 0, 11, 88, 1, 0, 0, 0, 13, 90, 1, 0, 0, 0, 15, 93, 1, 0, 0, 0, 17, 96, 1, 0, 0, 0, 19, 98, 1, 0, 0, 0, 21, 101, 1, 0, 0, 0, 23, 103, 1, 0, 0, 0, 25, 106, 1, 0, 0, 0, 27, 111, 1, 0, 0, 0, 29, 117, 1, 0, 0, 0, 31, 125, 1, 0, 0, 0, 33, 133, 1, 0, 0, 0, 35, 140, 1, 0, 0, 0, 37, 150, 1, 0, 0, 0, 39, 153, 1, 0, 0, 0, 41, 157, 1, 0, 0, 0, 43, 161, 1, 0, 0, 0, 45, 164, 1, 0, 0, 0, 47, 173, 1, 0, 0, 0, 49, 177, 1, 0, 0, 0, 51, 184, 1, 0, 0, 0, 53, 200, 1, 0, 0, 0, 55, 202, 1, 0, 0, 0, 57, 252, 1, 0, 0, 0, 59, 274, 1, 0, 0, 0, 61, 276, 1, 0, 0, 0, 63, 283, 1, 0, 0, 0, 65, 286, 1, 0, 0, 0, 67, 290, 1, 0, 0, 0, 69, 307, 1, 0, 0, 0, 71, 313, 1, 0, 0, 0, 73, 316, 1, 0, 0, 0, 75, 76, 5, 40, 0, 0, 76, 2, 1, 0, 0, 0, 77, 78, 5, 41, 0, 0, 78, 4, 1, 0, 0, 0, 79, 80, 5, 91, 0, 0, 80, 6, 1, 0, 0, 0, 81, 82, 5, 93, 0, 0, 82, 8, 1, 0, 0, 0, 83, 84, 5, 44, 0, 0, 84, 10, 1, 0, 0, 0, 85, 89, 5, 61, 0, 0, 86, 87, 5, 61, 0, 0, 87, 89, 5, 61, 0, 0, 88, 85, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, 89, 12, 1, 0, 0, 0, 90, 91, 5, 33, 0, 0, 91, 92, 5, 61, 0, 0, 92, 14, 1, 0, 0, 0, 93, 94, 5, 60, 0, 0, 94, 95, 5, 62, 0, 0, 95, 16, 1, 0, 0, 0, 96, 97, 5, 60, 0, 0, 97, 18, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 100, 5, 61, 0, 0, 100, 20, 1, 0, 0, 0, 101, 102, 5, 62, 0, 0, 102, 22, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104, 105, 5, 61, 0, 0, 105, 24, 1, 0, 0, 0, 106, 107, 7, 0, 0, 0, 107, 108, 7, 1, 0, 0, 108, 109, 7, 2, 0, 0, 109, 110, 7, 3, 0, 0, 110, 26, 1, 0, 0, 0, 111, 112, 7, 1, 0, 0, 112, 113, 7, 0, 0, 0, 113, 114, 7, 1, 0, 0, 114, 115, 7, 2, 0, 0, 115, 116, 7, 3, 0, 0, 116, 28, 1, 0, 0, 0, 117, 118, 7, 4, 0, 0, 118, 119, 7, 3, 0, 0, 119, 120, 7, 5, 0, 0, 120, 121, 7, 6, 0, 0, 121, 122, 7, 3, 0, 0, 122, 123, 7, 3, 0, 0, 123, 124, 7, 7, 0, 0, 124, 30, 1, 0, 0, 0, 125, 126, 7, 3, 0, 0, 126, 127, 7, 8, 0, 0, 127, 128, 7, 1, 0, 0, 
128, 129, 7, 9, 0, 0, 129, 131, 7, 5, 0, 0, 130, 132, 7, 9, 0, 0, 131, 130, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 32, 1, 0, 0, 0, 133, 134, 7, 10, 0, 0, 134, 135, 7, 3, 0, 0, 135, 136, 7, 11, 0, 0, 136, 137, 7, 3, 0, 0, 137, 138, 7, 8, 0, 0, 138, 139, 7, 12, 0, 0, 139, 34, 1, 0, 0, 0, 140, 141, 7, 13, 0, 0, 141, 142, 7, 14, 0, 0, 142, 143, 7, 7, 0, 0, 143, 144, 7, 5, 0, 0, 144, 145, 7, 15, 0, 0, 145, 146, 7, 1, 0, 0, 146, 148, 7, 7, 0, 0, 147, 149, 7, 9, 0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 36, 1, 0, 0, 0, 150, 151, 7, 1, 0, 0, 151, 152, 7, 7, 0, 0, 152, 38, 1, 0, 0, 0, 153, 154, 7, 7, 0, 0, 154, 155, 7, 14, 0, 0, 155, 156, 7, 5, 0, 0, 156, 40, 1, 0, 0, 0, 157, 158, 7, 15, 0, 0, 158, 159, 7, 7, 0, 0, 159, 160, 7, 16, 0, 0, 160, 42, 1, 0, 0, 0, 161, 162, 7, 14, 0, 0, 162, 163, 7, 10, 0, 0, 163, 44, 1, 0, 0, 0, 164, 165, 7, 17, 0, 0, 165, 166, 7, 15, 0, 0, 166, 167, 7, 9, 0, 0, 167, 168, 7, 5, 0, 0, 168, 169, 7, 14, 0, 0, 169, 170, 7, 2, 0, 0, 170, 171, 7, 3, 0, 0, 171, 172, 7, 7, 0, 0, 172, 46, 1, 0, 0, 0, 173, 174, 7, 17, 0, 0, 174, 175, 7, 15, 0, 0, 175, 176, 7, 9, 0, 0, 176, 48, 1, 0, 0, 0, 177, 178, 7, 17, 0, 0, 178, 179, 7, 15, 0, 0, 179, 180, 7, 9, 0, 0, 180, 181, 7, 15, 0, 0, 181, 182, 7, 7, 0, 0, 182, 183, 7, 18, 0, 0, 183, 50, 1, 0, 0, 0, 184, 185, 7, 17, 0, 0, 185, 186, 7, 15, 0, 0, 186, 187, 7, 9, 0, 0, 187, 188, 7, 15, 0, 0, 188, 189, 7, 0, 0, 0, 189, 190, 7, 0, 0, 0, 190, 52, 1, 0, 0, 0, 191, 192, 7, 5, 0, 0, 192, 193, 7, 10, 0, 0, 193, 194, 7, 19, 0, 0, 194, 201, 7, 3, 0, 0, 195, 196, 7, 20, 0, 0, 196, 197, 7, 15, 0, 0, 197, 198, 7, 0, 0, 0, 198, 199, 7, 9, 0, 0, 199, 201, 7, 3, 0, 0, 200, 191, 1, 0, 0, 0, 200, 195, 1, 0, 0, 0, 201, 54, 1, 0, 0, 0, 202, 203, 7, 21, 0, 0, 203, 56, 1, 0, 0, 0, 204, 206, 3, 55, 27, 0, 205, 204, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 208, 1, 0, 0, 0, 207, 209, 3, 71, 35, 0, 208, 207, 1, 0, 0, 0, 209, 210, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 210, 211, 1, 0, 0, 0, 211, 219, 1, 0, 0, 0, 212, 216, 5, 46, 0, 0, 
213, 215, 3, 71, 35, 0, 214, 213, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 220, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 212, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, 220, 230, 1, 0, 0, 0, 221, 223, 7, 3, 0, 0, 222, 224, 3, 55, 27, 0, 223, 222, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 1, 0, 0, 0, 225, 227, 3, 71, 35, 0, 226, 225, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 231, 1, 0, 0, 0, 230, 221, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 253, 1, 0, 0, 0, 232, 234, 3, 55, 27, 0, 233, 232, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 237, 5, 46, 0, 0, 236, 238, 3, 71, 35, 0, 237, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, 0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 55, 27, 0, 243, 242, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 71, 35, 0, 246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, 1, 0, 0, 0, 251, 253, 1, 0, 0, 0, 252, 205, 1, 0, 0, 0, 252, 233, 1, 0, 0, 0, 253, 58, 1, 0, 0, 0, 254, 260, 5, 34, 0, 0, 255, 259, 8, 22, 0, 0, 256, 257, 5, 92, 0, 0, 257, 259, 9, 0, 0, 0, 258, 255, 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 263, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 275, 5, 34, 0, 0, 264, 270, 5, 39, 0, 0, 265, 269, 8, 23, 0, 0, 266, 267, 5, 92, 0, 0, 267, 269, 9, 0, 0, 0, 268, 265, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 39, 0, 0, 274, 254, 1, 0, 0, 0, 274, 264, 1, 0, 0, 0, 275, 60, 1, 0, 0, 0, 276, 280, 7, 24, 0, 0, 277, 279, 7, 25, 0, 0, 278, 277, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 62, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 283, 284, 5, 91, 0, 0, 284, 285, 5, 93, 0, 0, 285, 64, 1, 0, 0, 
0, 286, 287, 5, 91, 0, 0, 287, 288, 5, 42, 0, 0, 288, 289, 5, 93, 0, 0, 289, 66, 1, 0, 0, 0, 290, 303, 3, 61, 30, 0, 291, 292, 5, 46, 0, 0, 292, 302, 3, 61, 30, 0, 293, 302, 3, 63, 31, 0, 294, 302, 3, 65, 32, 0, 295, 297, 5, 46, 0, 0, 296, 298, 3, 71, 35, 0, 297, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 302, 1, 0, 0, 0, 301, 291, 1, 0, 0, 0, 301, 293, 1, 0, 0, 0, 301, 294, 1, 0, 0, 0, 301, 295, 1, 0, 0, 0, 302, 305, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 68, 1, 0, 0, 0, 305, 303, 1, 0, 0, 0, 306, 308, 7, 26, 0, 0, 307, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 309, 310, 1, 0, 0, 0, 310, 311, 1, 0, 0, 0, 311, 312, 6, 34, 0, 0, 312, 70, 1, 0, 0, 0, 313, 314, 7, 27, 0, 0, 314, 72, 1, 0, 0, 0, 315, 317, 8, 28, 0, 0, 316, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 316, 1, 0, 0, 0, 318, 319, 1, 0, 0, 0, 319, 74, 1, 0, 0, 0, 29, 0, 88, 131, 148, 200, 205, 210, 216, 219, 223, 228, 230, 233, 239, 243, 248, 250, 252, 258, 260, 268, 270, 274, 280, 299, 301, 303, 309, 318, 1, 6, 0, 0] \ No newline at end of file diff --git a/pkg/parser/grammar/filterquery_lexer.go b/pkg/parser/grammar/filterquery_lexer.go index 88f94ec69c24..ac770955d730 100644 --- a/pkg/parser/grammar/filterquery_lexer.go +++ b/pkg/parser/grammar/filterquery_lexer.go @@ -61,7 +61,7 @@ func filterquerylexerLexerInit() { } staticData.PredictionContextCache = antlr.NewPredictionContextCache() staticData.serializedATN = []int32{ - 4, 0, 32, 314, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, + 4, 0, 32, 320, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, @@ -90,127 +90,130 @@ func filterquerylexerLexerInit() { 8, 29, 10, 29, 12, 29, 262, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 
269, 8, 29, 10, 29, 12, 29, 272, 9, 29, 1, 29, 3, 29, 275, 8, 29, 1, 30, 1, 30, 5, 30, 279, 8, 30, 10, 30, 12, 30, 282, 9, 30, 1, 31, 1, 31, - 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 5, - 33, 296, 8, 33, 10, 33, 12, 33, 299, 9, 33, 1, 34, 4, 34, 302, 8, 34, 11, - 34, 12, 34, 303, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 4, 36, 311, 8, 36, - 11, 36, 12, 36, 312, 0, 0, 37, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, - 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, - 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, - 51, 26, 53, 27, 55, 0, 57, 28, 59, 29, 61, 0, 63, 0, 65, 0, 67, 30, 69, - 31, 71, 0, 73, 32, 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, - 105, 2, 0, 75, 75, 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 66, 66, 98, - 98, 2, 0, 84, 84, 116, 116, 2, 0, 87, 87, 119, 119, 2, 0, 78, 78, 110, - 110, 2, 0, 88, 88, 120, 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, - 114, 2, 0, 71, 71, 103, 103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, - 99, 2, 0, 79, 79, 111, 111, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, - 2, 0, 72, 72, 104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, - 2, 0, 70, 70, 102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, - 0, 39, 39, 92, 92, 4, 0, 36, 36, 65, 90, 95, 95, 97, 122, 6, 0, 36, 36, - 45, 45, 47, 58, 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, - 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, - 93, 93, 336, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, - 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, - 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, - 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, - 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, - 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, - 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, - 
1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, - 69, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 1, 75, 1, 0, 0, 0, 3, 77, 1, 0, 0, 0, - 5, 79, 1, 0, 0, 0, 7, 81, 1, 0, 0, 0, 9, 83, 1, 0, 0, 0, 11, 88, 1, 0, - 0, 0, 13, 90, 1, 0, 0, 0, 15, 93, 1, 0, 0, 0, 17, 96, 1, 0, 0, 0, 19, 98, - 1, 0, 0, 0, 21, 101, 1, 0, 0, 0, 23, 103, 1, 0, 0, 0, 25, 106, 1, 0, 0, - 0, 27, 111, 1, 0, 0, 0, 29, 117, 1, 0, 0, 0, 31, 125, 1, 0, 0, 0, 33, 133, - 1, 0, 0, 0, 35, 140, 1, 0, 0, 0, 37, 150, 1, 0, 0, 0, 39, 153, 1, 0, 0, - 0, 41, 157, 1, 0, 0, 0, 43, 161, 1, 0, 0, 0, 45, 164, 1, 0, 0, 0, 47, 173, - 1, 0, 0, 0, 49, 177, 1, 0, 0, 0, 51, 184, 1, 0, 0, 0, 53, 200, 1, 0, 0, - 0, 55, 202, 1, 0, 0, 0, 57, 252, 1, 0, 0, 0, 59, 274, 1, 0, 0, 0, 61, 276, - 1, 0, 0, 0, 63, 283, 1, 0, 0, 0, 65, 286, 1, 0, 0, 0, 67, 290, 1, 0, 0, - 0, 69, 301, 1, 0, 0, 0, 71, 307, 1, 0, 0, 0, 73, 310, 1, 0, 0, 0, 75, 76, - 5, 40, 0, 0, 76, 2, 1, 0, 0, 0, 77, 78, 5, 41, 0, 0, 78, 4, 1, 0, 0, 0, - 79, 80, 5, 91, 0, 0, 80, 6, 1, 0, 0, 0, 81, 82, 5, 93, 0, 0, 82, 8, 1, - 0, 0, 0, 83, 84, 5, 44, 0, 0, 84, 10, 1, 0, 0, 0, 85, 89, 5, 61, 0, 0, - 86, 87, 5, 61, 0, 0, 87, 89, 5, 61, 0, 0, 88, 85, 1, 0, 0, 0, 88, 86, 1, - 0, 0, 0, 89, 12, 1, 0, 0, 0, 90, 91, 5, 33, 0, 0, 91, 92, 5, 61, 0, 0, - 92, 14, 1, 0, 0, 0, 93, 94, 5, 60, 0, 0, 94, 95, 5, 62, 0, 0, 95, 16, 1, - 0, 0, 0, 96, 97, 5, 60, 0, 0, 97, 18, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, - 99, 100, 5, 61, 0, 0, 100, 20, 1, 0, 0, 0, 101, 102, 5, 62, 0, 0, 102, - 22, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104, 105, 5, 61, 0, 0, 105, 24, - 1, 0, 0, 0, 106, 107, 7, 0, 0, 0, 107, 108, 7, 1, 0, 0, 108, 109, 7, 2, - 0, 0, 109, 110, 7, 3, 0, 0, 110, 26, 1, 0, 0, 0, 111, 112, 7, 1, 0, 0, - 112, 113, 7, 0, 0, 0, 113, 114, 7, 1, 0, 0, 114, 115, 7, 2, 0, 0, 115, - 116, 7, 3, 0, 0, 116, 28, 1, 0, 0, 0, 117, 118, 7, 4, 0, 0, 118, 119, 7, - 3, 0, 0, 119, 120, 7, 5, 0, 0, 120, 121, 7, 6, 0, 0, 121, 122, 7, 3, 0, - 0, 122, 123, 7, 3, 0, 0, 123, 124, 7, 7, 0, 0, 124, 30, 1, 0, 0, 0, 
125, - 126, 7, 3, 0, 0, 126, 127, 7, 8, 0, 0, 127, 128, 7, 1, 0, 0, 128, 129, - 7, 9, 0, 0, 129, 131, 7, 5, 0, 0, 130, 132, 7, 9, 0, 0, 131, 130, 1, 0, - 0, 0, 131, 132, 1, 0, 0, 0, 132, 32, 1, 0, 0, 0, 133, 134, 7, 10, 0, 0, - 134, 135, 7, 3, 0, 0, 135, 136, 7, 11, 0, 0, 136, 137, 7, 3, 0, 0, 137, - 138, 7, 8, 0, 0, 138, 139, 7, 12, 0, 0, 139, 34, 1, 0, 0, 0, 140, 141, - 7, 13, 0, 0, 141, 142, 7, 14, 0, 0, 142, 143, 7, 7, 0, 0, 143, 144, 7, - 5, 0, 0, 144, 145, 7, 15, 0, 0, 145, 146, 7, 1, 0, 0, 146, 148, 7, 7, 0, - 0, 147, 149, 7, 9, 0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, - 36, 1, 0, 0, 0, 150, 151, 7, 1, 0, 0, 151, 152, 7, 7, 0, 0, 152, 38, 1, - 0, 0, 0, 153, 154, 7, 7, 0, 0, 154, 155, 7, 14, 0, 0, 155, 156, 7, 5, 0, - 0, 156, 40, 1, 0, 0, 0, 157, 158, 7, 15, 0, 0, 158, 159, 7, 7, 0, 0, 159, - 160, 7, 16, 0, 0, 160, 42, 1, 0, 0, 0, 161, 162, 7, 14, 0, 0, 162, 163, - 7, 10, 0, 0, 163, 44, 1, 0, 0, 0, 164, 165, 7, 17, 0, 0, 165, 166, 7, 15, - 0, 0, 166, 167, 7, 9, 0, 0, 167, 168, 7, 5, 0, 0, 168, 169, 7, 14, 0, 0, - 169, 170, 7, 2, 0, 0, 170, 171, 7, 3, 0, 0, 171, 172, 7, 7, 0, 0, 172, - 46, 1, 0, 0, 0, 173, 174, 7, 17, 0, 0, 174, 175, 7, 15, 0, 0, 175, 176, - 7, 9, 0, 0, 176, 48, 1, 0, 0, 0, 177, 178, 7, 17, 0, 0, 178, 179, 7, 15, - 0, 0, 179, 180, 7, 9, 0, 0, 180, 181, 7, 15, 0, 0, 181, 182, 7, 7, 0, 0, - 182, 183, 7, 18, 0, 0, 183, 50, 1, 0, 0, 0, 184, 185, 7, 17, 0, 0, 185, - 186, 7, 15, 0, 0, 186, 187, 7, 9, 0, 0, 187, 188, 7, 15, 0, 0, 188, 189, - 7, 0, 0, 0, 189, 190, 7, 0, 0, 0, 190, 52, 1, 0, 0, 0, 191, 192, 7, 5, - 0, 0, 192, 193, 7, 10, 0, 0, 193, 194, 7, 19, 0, 0, 194, 201, 7, 3, 0, - 0, 195, 196, 7, 20, 0, 0, 196, 197, 7, 15, 0, 0, 197, 198, 7, 0, 0, 0, - 198, 199, 7, 9, 0, 0, 199, 201, 7, 3, 0, 0, 200, 191, 1, 0, 0, 0, 200, - 195, 1, 0, 0, 0, 201, 54, 1, 0, 0, 0, 202, 203, 7, 21, 0, 0, 203, 56, 1, - 0, 0, 0, 204, 206, 3, 55, 27, 0, 205, 204, 1, 0, 0, 0, 205, 206, 1, 0, - 0, 0, 206, 208, 1, 0, 0, 0, 207, 209, 3, 71, 35, 0, 208, 207, 1, 
0, 0, - 0, 209, 210, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 210, 211, 1, 0, 0, 0, 211, - 219, 1, 0, 0, 0, 212, 216, 5, 46, 0, 0, 213, 215, 3, 71, 35, 0, 214, 213, - 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, - 0, 0, 217, 220, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 212, 1, 0, 0, 0, - 219, 220, 1, 0, 0, 0, 220, 230, 1, 0, 0, 0, 221, 223, 7, 3, 0, 0, 222, - 224, 3, 55, 27, 0, 223, 222, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, - 1, 0, 0, 0, 225, 227, 3, 71, 35, 0, 226, 225, 1, 0, 0, 0, 227, 228, 1, - 0, 0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 231, 1, 0, 0, - 0, 230, 221, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 253, 1, 0, 0, 0, 232, - 234, 3, 55, 27, 0, 233, 232, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, - 1, 0, 0, 0, 235, 237, 5, 46, 0, 0, 236, 238, 3, 71, 35, 0, 237, 236, 1, - 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, - 0, 240, 250, 1, 0, 0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 55, 27, 0, - 243, 242, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, - 247, 3, 71, 35, 0, 246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, - 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, - 0, 0, 250, 251, 1, 0, 0, 0, 251, 253, 1, 0, 0, 0, 252, 205, 1, 0, 0, 0, - 252, 233, 1, 0, 0, 0, 253, 58, 1, 0, 0, 0, 254, 260, 5, 34, 0, 0, 255, - 259, 8, 22, 0, 0, 256, 257, 5, 92, 0, 0, 257, 259, 9, 0, 0, 0, 258, 255, - 1, 0, 0, 0, 258, 256, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, - 0, 0, 260, 261, 1, 0, 0, 0, 261, 263, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, - 263, 275, 5, 34, 0, 0, 264, 270, 5, 39, 0, 0, 265, 269, 8, 23, 0, 0, 266, - 267, 5, 92, 0, 0, 267, 269, 9, 0, 0, 0, 268, 265, 1, 0, 0, 0, 268, 266, - 1, 0, 0, 0, 269, 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, - 0, 0, 271, 273, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 39, 0, 0, - 274, 254, 1, 0, 0, 0, 274, 264, 1, 0, 0, 0, 275, 60, 1, 0, 0, 0, 276, 280, - 7, 24, 0, 0, 277, 279, 7, 25, 0, 0, 278, 277, 1, 0, 0, 
0, 279, 282, 1, - 0, 0, 0, 280, 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 62, 1, 0, 0, - 0, 282, 280, 1, 0, 0, 0, 283, 284, 5, 91, 0, 0, 284, 285, 5, 93, 0, 0, - 285, 64, 1, 0, 0, 0, 286, 287, 5, 91, 0, 0, 287, 288, 5, 42, 0, 0, 288, - 289, 5, 93, 0, 0, 289, 66, 1, 0, 0, 0, 290, 297, 3, 61, 30, 0, 291, 292, - 5, 46, 0, 0, 292, 296, 3, 61, 30, 0, 293, 296, 3, 63, 31, 0, 294, 296, - 3, 65, 32, 0, 295, 291, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 295, 294, 1, - 0, 0, 0, 296, 299, 1, 0, 0, 0, 297, 295, 1, 0, 0, 0, 297, 298, 1, 0, 0, - 0, 298, 68, 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 300, 302, 7, 26, 0, 0, 301, - 300, 1, 0, 0, 0, 302, 303, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 303, 304, - 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306, 6, 34, 0, 0, 306, 70, 1, 0, - 0, 0, 307, 308, 7, 27, 0, 0, 308, 72, 1, 0, 0, 0, 309, 311, 8, 28, 0, 0, - 310, 309, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 310, 1, 0, 0, 0, 312, - 313, 1, 0, 0, 0, 313, 74, 1, 0, 0, 0, 28, 0, 88, 131, 148, 200, 205, 210, - 216, 219, 223, 228, 230, 233, 239, 243, 248, 250, 252, 258, 260, 268, 270, - 274, 280, 295, 297, 303, 312, 1, 6, 0, 0, + 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, + 33, 1, 33, 4, 33, 298, 8, 33, 11, 33, 12, 33, 299, 5, 33, 302, 8, 33, 10, + 33, 12, 33, 305, 9, 33, 1, 34, 4, 34, 308, 8, 34, 11, 34, 12, 34, 309, + 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 4, 36, 317, 8, 36, 11, 36, 12, 36, 318, + 0, 0, 37, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, + 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, + 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, + 0, 57, 28, 59, 29, 61, 0, 63, 0, 65, 0, 67, 30, 69, 31, 71, 0, 73, 32, + 1, 0, 29, 2, 0, 76, 76, 108, 108, 2, 0, 73, 73, 105, 105, 2, 0, 75, 75, + 107, 107, 2, 0, 69, 69, 101, 101, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, + 116, 2, 0, 87, 87, 119, 119, 2, 0, 78, 78, 110, 110, 2, 0, 88, 88, 120, + 120, 2, 0, 83, 83, 115, 115, 2, 0, 82, 82, 114, 114, 2, 0, 71, 71, 103, + 
103, 2, 0, 80, 80, 112, 112, 2, 0, 67, 67, 99, 99, 2, 0, 79, 79, 111, 111, + 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72, 104, 104, 2, + 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70, 102, 102, 2, + 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92, 92, 4, 0, 35, + 36, 64, 90, 95, 95, 97, 123, 7, 0, 35, 36, 45, 45, 47, 58, 64, 90, 95, + 95, 97, 123, 125, 125, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, + 9, 10, 13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 344, 0, + 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, + 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, + 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, + 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, + 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, + 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, + 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, + 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, + 0, 73, 1, 0, 0, 0, 1, 75, 1, 0, 0, 0, 3, 77, 1, 0, 0, 0, 5, 79, 1, 0, 0, + 0, 7, 81, 1, 0, 0, 0, 9, 83, 1, 0, 0, 0, 11, 88, 1, 0, 0, 0, 13, 90, 1, + 0, 0, 0, 15, 93, 1, 0, 0, 0, 17, 96, 1, 0, 0, 0, 19, 98, 1, 0, 0, 0, 21, + 101, 1, 0, 0, 0, 23, 103, 1, 0, 0, 0, 25, 106, 1, 0, 0, 0, 27, 111, 1, + 0, 0, 0, 29, 117, 1, 0, 0, 0, 31, 125, 1, 0, 0, 0, 33, 133, 1, 0, 0, 0, + 35, 140, 1, 0, 0, 0, 37, 150, 1, 0, 0, 0, 39, 153, 1, 0, 0, 0, 41, 157, + 1, 0, 0, 0, 43, 161, 1, 0, 0, 0, 45, 164, 1, 0, 0, 0, 47, 173, 1, 0, 0, + 0, 49, 177, 1, 0, 0, 0, 51, 184, 1, 0, 0, 0, 53, 200, 1, 0, 0, 0, 55, 202, + 1, 0, 0, 0, 57, 252, 1, 0, 0, 0, 59, 274, 1, 0, 0, 0, 61, 276, 1, 0, 0, + 0, 63, 283, 1, 0, 0, 0, 65, 286, 1, 0, 0, 0, 67, 290, 1, 0, 0, 0, 69, 307, + 1, 0, 0, 0, 71, 313, 1, 0, 0, 0, 73, 316, 1, 0, 0, 0, 75, 76, 5, 40, 0, + 0, 76, 2, 1, 0, 0, 0, 77, 78, 5, 41, 0, 0, 78, 4, 1, 0, 0, 
0, 79, 80, 5, + 91, 0, 0, 80, 6, 1, 0, 0, 0, 81, 82, 5, 93, 0, 0, 82, 8, 1, 0, 0, 0, 83, + 84, 5, 44, 0, 0, 84, 10, 1, 0, 0, 0, 85, 89, 5, 61, 0, 0, 86, 87, 5, 61, + 0, 0, 87, 89, 5, 61, 0, 0, 88, 85, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, 89, + 12, 1, 0, 0, 0, 90, 91, 5, 33, 0, 0, 91, 92, 5, 61, 0, 0, 92, 14, 1, 0, + 0, 0, 93, 94, 5, 60, 0, 0, 94, 95, 5, 62, 0, 0, 95, 16, 1, 0, 0, 0, 96, + 97, 5, 60, 0, 0, 97, 18, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 100, 5, 61, + 0, 0, 100, 20, 1, 0, 0, 0, 101, 102, 5, 62, 0, 0, 102, 22, 1, 0, 0, 0, + 103, 104, 5, 62, 0, 0, 104, 105, 5, 61, 0, 0, 105, 24, 1, 0, 0, 0, 106, + 107, 7, 0, 0, 0, 107, 108, 7, 1, 0, 0, 108, 109, 7, 2, 0, 0, 109, 110, + 7, 3, 0, 0, 110, 26, 1, 0, 0, 0, 111, 112, 7, 1, 0, 0, 112, 113, 7, 0, + 0, 0, 113, 114, 7, 1, 0, 0, 114, 115, 7, 2, 0, 0, 115, 116, 7, 3, 0, 0, + 116, 28, 1, 0, 0, 0, 117, 118, 7, 4, 0, 0, 118, 119, 7, 3, 0, 0, 119, 120, + 7, 5, 0, 0, 120, 121, 7, 6, 0, 0, 121, 122, 7, 3, 0, 0, 122, 123, 7, 3, + 0, 0, 123, 124, 7, 7, 0, 0, 124, 30, 1, 0, 0, 0, 125, 126, 7, 3, 0, 0, + 126, 127, 7, 8, 0, 0, 127, 128, 7, 1, 0, 0, 128, 129, 7, 9, 0, 0, 129, + 131, 7, 5, 0, 0, 130, 132, 7, 9, 0, 0, 131, 130, 1, 0, 0, 0, 131, 132, + 1, 0, 0, 0, 132, 32, 1, 0, 0, 0, 133, 134, 7, 10, 0, 0, 134, 135, 7, 3, + 0, 0, 135, 136, 7, 11, 0, 0, 136, 137, 7, 3, 0, 0, 137, 138, 7, 8, 0, 0, + 138, 139, 7, 12, 0, 0, 139, 34, 1, 0, 0, 0, 140, 141, 7, 13, 0, 0, 141, + 142, 7, 14, 0, 0, 142, 143, 7, 7, 0, 0, 143, 144, 7, 5, 0, 0, 144, 145, + 7, 15, 0, 0, 145, 146, 7, 1, 0, 0, 146, 148, 7, 7, 0, 0, 147, 149, 7, 9, + 0, 0, 148, 147, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 36, 1, 0, 0, 0, + 150, 151, 7, 1, 0, 0, 151, 152, 7, 7, 0, 0, 152, 38, 1, 0, 0, 0, 153, 154, + 7, 7, 0, 0, 154, 155, 7, 14, 0, 0, 155, 156, 7, 5, 0, 0, 156, 40, 1, 0, + 0, 0, 157, 158, 7, 15, 0, 0, 158, 159, 7, 7, 0, 0, 159, 160, 7, 16, 0, + 0, 160, 42, 1, 0, 0, 0, 161, 162, 7, 14, 0, 0, 162, 163, 7, 10, 0, 0, 163, + 44, 1, 0, 0, 0, 164, 165, 7, 17, 0, 0, 165, 166, 7, 
15, 0, 0, 166, 167, + 7, 9, 0, 0, 167, 168, 7, 5, 0, 0, 168, 169, 7, 14, 0, 0, 169, 170, 7, 2, + 0, 0, 170, 171, 7, 3, 0, 0, 171, 172, 7, 7, 0, 0, 172, 46, 1, 0, 0, 0, + 173, 174, 7, 17, 0, 0, 174, 175, 7, 15, 0, 0, 175, 176, 7, 9, 0, 0, 176, + 48, 1, 0, 0, 0, 177, 178, 7, 17, 0, 0, 178, 179, 7, 15, 0, 0, 179, 180, + 7, 9, 0, 0, 180, 181, 7, 15, 0, 0, 181, 182, 7, 7, 0, 0, 182, 183, 7, 18, + 0, 0, 183, 50, 1, 0, 0, 0, 184, 185, 7, 17, 0, 0, 185, 186, 7, 15, 0, 0, + 186, 187, 7, 9, 0, 0, 187, 188, 7, 15, 0, 0, 188, 189, 7, 0, 0, 0, 189, + 190, 7, 0, 0, 0, 190, 52, 1, 0, 0, 0, 191, 192, 7, 5, 0, 0, 192, 193, 7, + 10, 0, 0, 193, 194, 7, 19, 0, 0, 194, 201, 7, 3, 0, 0, 195, 196, 7, 20, + 0, 0, 196, 197, 7, 15, 0, 0, 197, 198, 7, 0, 0, 0, 198, 199, 7, 9, 0, 0, + 199, 201, 7, 3, 0, 0, 200, 191, 1, 0, 0, 0, 200, 195, 1, 0, 0, 0, 201, + 54, 1, 0, 0, 0, 202, 203, 7, 21, 0, 0, 203, 56, 1, 0, 0, 0, 204, 206, 3, + 55, 27, 0, 205, 204, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 208, 1, 0, + 0, 0, 207, 209, 3, 71, 35, 0, 208, 207, 1, 0, 0, 0, 209, 210, 1, 0, 0, + 0, 210, 208, 1, 0, 0, 0, 210, 211, 1, 0, 0, 0, 211, 219, 1, 0, 0, 0, 212, + 216, 5, 46, 0, 0, 213, 215, 3, 71, 35, 0, 214, 213, 1, 0, 0, 0, 215, 218, + 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 220, 1, 0, + 0, 0, 218, 216, 1, 0, 0, 0, 219, 212, 1, 0, 0, 0, 219, 220, 1, 0, 0, 0, + 220, 230, 1, 0, 0, 0, 221, 223, 7, 3, 0, 0, 222, 224, 3, 55, 27, 0, 223, + 222, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 226, 1, 0, 0, 0, 225, 227, + 3, 71, 35, 0, 226, 225, 1, 0, 0, 0, 227, 228, 1, 0, 0, 0, 228, 226, 1, + 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 231, 1, 0, 0, 0, 230, 221, 1, 0, 0, + 0, 230, 231, 1, 0, 0, 0, 231, 253, 1, 0, 0, 0, 232, 234, 3, 55, 27, 0, + 233, 232, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, + 237, 5, 46, 0, 0, 236, 238, 3, 71, 35, 0, 237, 236, 1, 0, 0, 0, 238, 239, + 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 250, 1, 0, + 0, 0, 241, 243, 7, 3, 0, 0, 242, 244, 3, 55, 
27, 0, 243, 242, 1, 0, 0, + 0, 243, 244, 1, 0, 0, 0, 244, 246, 1, 0, 0, 0, 245, 247, 3, 71, 35, 0, + 246, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, + 249, 1, 0, 0, 0, 249, 251, 1, 0, 0, 0, 250, 241, 1, 0, 0, 0, 250, 251, + 1, 0, 0, 0, 251, 253, 1, 0, 0, 0, 252, 205, 1, 0, 0, 0, 252, 233, 1, 0, + 0, 0, 253, 58, 1, 0, 0, 0, 254, 260, 5, 34, 0, 0, 255, 259, 8, 22, 0, 0, + 256, 257, 5, 92, 0, 0, 257, 259, 9, 0, 0, 0, 258, 255, 1, 0, 0, 0, 258, + 256, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, + 1, 0, 0, 0, 261, 263, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 275, 5, 34, + 0, 0, 264, 270, 5, 39, 0, 0, 265, 269, 8, 23, 0, 0, 266, 267, 5, 92, 0, + 0, 267, 269, 9, 0, 0, 0, 268, 265, 1, 0, 0, 0, 268, 266, 1, 0, 0, 0, 269, + 272, 1, 0, 0, 0, 270, 268, 1, 0, 0, 0, 270, 271, 1, 0, 0, 0, 271, 273, + 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 273, 275, 5, 39, 0, 0, 274, 254, 1, 0, + 0, 0, 274, 264, 1, 0, 0, 0, 275, 60, 1, 0, 0, 0, 276, 280, 7, 24, 0, 0, + 277, 279, 7, 25, 0, 0, 278, 277, 1, 0, 0, 0, 279, 282, 1, 0, 0, 0, 280, + 278, 1, 0, 0, 0, 280, 281, 1, 0, 0, 0, 281, 62, 1, 0, 0, 0, 282, 280, 1, + 0, 0, 0, 283, 284, 5, 91, 0, 0, 284, 285, 5, 93, 0, 0, 285, 64, 1, 0, 0, + 0, 286, 287, 5, 91, 0, 0, 287, 288, 5, 42, 0, 0, 288, 289, 5, 93, 0, 0, + 289, 66, 1, 0, 0, 0, 290, 303, 3, 61, 30, 0, 291, 292, 5, 46, 0, 0, 292, + 302, 3, 61, 30, 0, 293, 302, 3, 63, 31, 0, 294, 302, 3, 65, 32, 0, 295, + 297, 5, 46, 0, 0, 296, 298, 3, 71, 35, 0, 297, 296, 1, 0, 0, 0, 298, 299, + 1, 0, 0, 0, 299, 297, 1, 0, 0, 0, 299, 300, 1, 0, 0, 0, 300, 302, 1, 0, + 0, 0, 301, 291, 1, 0, 0, 0, 301, 293, 1, 0, 0, 0, 301, 294, 1, 0, 0, 0, + 301, 295, 1, 0, 0, 0, 302, 305, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 303, + 304, 1, 0, 0, 0, 304, 68, 1, 0, 0, 0, 305, 303, 1, 0, 0, 0, 306, 308, 7, + 26, 0, 0, 307, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 307, 1, 0, 0, + 0, 309, 310, 1, 0, 0, 0, 310, 311, 1, 0, 0, 0, 311, 312, 6, 34, 0, 0, 312, + 70, 1, 0, 0, 0, 313, 314, 7, 27, 0, 0, 
314, 72, 1, 0, 0, 0, 315, 317, 8, + 28, 0, 0, 316, 315, 1, 0, 0, 0, 317, 318, 1, 0, 0, 0, 318, 316, 1, 0, 0, + 0, 318, 319, 1, 0, 0, 0, 319, 74, 1, 0, 0, 0, 29, 0, 88, 131, 148, 200, + 205, 210, 216, 219, 223, 228, 230, 233, 239, 243, 248, 250, 252, 258, 260, + 268, 270, 274, 280, 299, 301, 303, 309, 318, 1, 6, 0, 0, } deserializer := antlr.NewATNDeserializer(nil) staticData.atn = deserializer.Deserialize(staticData.serializedATN) diff --git a/pkg/telemetrylogs/filter_expr_logs_test.go b/pkg/telemetrylogs/filter_expr_logs_test.go index 2a46a96066f2..3b9a32dd515b 100644 --- a/pkg/telemetrylogs/filter_expr_logs_test.go +++ b/pkg/telemetrylogs/filter_expr_logs_test.go @@ -121,6 +121,38 @@ func TestFilterExprLogs(t *testing.T) { expectedErrorContains: "", }, + { + category: "Key with curly brace", + query: `{UserId} = "U101"`, + shouldPass: true, + expectedQuery: "WHERE (attributes_string['{UserId}'] = ? AND mapContains(attributes_string, '{UserId}') = ?)", + expectedArgs: []any{"U101", true}, + expectedErrorContains: "", + }, + { + category: "Key with @symbol", + query: `user@email = "u@example.com"`, + shouldPass: true, + expectedQuery: "WHERE (attributes_string['user@email'] = ? AND mapContains(attributes_string, 'user@email') = ?)", + expectedArgs: []any{"u@example.com", true}, + expectedErrorContains: "", + }, + { + category: "Key with @symbol", + query: `#user_name = "anon42069"`, + shouldPass: true, + expectedQuery: "WHERE (attributes_string['#user_name'] = ? AND mapContains(attributes_string, '#user_name') = ?)", + expectedArgs: []any{"anon42069", true}, + expectedErrorContains: "", + }, + { + category: "Key with @symbol", + query: `gen_ai.completion.0.content = "जब तक इस देश में सिनेमा है"`, + shouldPass: true, + expectedQuery: "WHERE (attributes_string['gen_ai.completion.0.content'] = ? 
AND mapContains(attributes_string, 'gen_ai.completion.0.content') = ?)", + expectedArgs: []any{"जब तक इस देश में सिनेमा है", true}, + expectedErrorContains: "", + }, // Searches with special characters { category: "Special characters", @@ -2437,6 +2469,14 @@ func TestFilterExprLogsConflictNegation(t *testing.T) { expectedArgs: []any{"done", "done"}, expectedErrorContains: "", }, + { + category: "exists", + query: "body EXISTS", + shouldPass: true, + expectedQuery: "WHERE (body <> ? OR mapContains(attributes_string, 'body') = ?)", + expectedArgs: []any{"", true}, + expectedErrorContains: "", + }, } for _, tc := range testCases { diff --git a/pkg/telemetrylogs/test_data.go b/pkg/telemetrylogs/test_data.go index 74714361bbd7..3dd05933d9b2 100644 --- a/pkg/telemetrylogs/test_data.go +++ b/pkg/telemetrylogs/test_data.go @@ -862,6 +862,34 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey { Materialized: true, }, }, + "{UserId}": { + { + Name: "{UserId}", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + "user@email": { + { + Name: "user@email", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + "#user_name": { + { + Name: "#user_name", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, + "gen_ai.completion.0.content": { + { + Name: "gen_ai.completion.0.content", + FieldContext: telemetrytypes.FieldContextAttribute, + FieldDataType: telemetrytypes.FieldDataTypeString, + }, + }, } for _, keys := range keysMap { diff --git a/pkg/types/querybuildertypes/querybuildertypesv5/builder_elements.go b/pkg/types/querybuildertypes/querybuildertypesv5/builder_elements.go index a89fed33ef92..06c4c93a6906 100644 --- a/pkg/types/querybuildertypes/querybuildertypesv5/builder_elements.go +++ 
b/pkg/types/querybuildertypes/querybuildertypesv5/builder_elements.go @@ -129,6 +129,7 @@ func (f FilterOperator) IsNegativeOperator() bool { FilterOperatorILike, FilterOperatorBetween, FilterOperatorIn, + FilterOperatorExists, FilterOperatorRegexp, FilterOperatorContains: return false diff --git a/scripts/grammar/generate-frontend-parser.sh b/scripts/grammar/generate-frontend-parser.sh new file mode 100755 index 000000000000..9fb933bdef7b --- /dev/null +++ b/scripts/grammar/generate-frontend-parser.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -e + +echo "Generating TypeScript parser..." +# Create output directory if it doesn't exist +mkdir -p frontend/src/parser + +# Generate TypeScript parser +antlr4 -Dlanguage=TypeScript -o frontend/src/parser grammar/FilterQuery.g4 -visitor + +echo "TypeScript parser generation complete" From ac81eab7bb350b35bce0809a8041bf5217161f37 Mon Sep 17 00:00:00 2001 From: aniketio-ctrl Date: Mon, 15 Sep 2025 15:00:12 +0530 Subject: [PATCH 24/51] chore: added cumulative window support (#8828) * feat(multi-threshold): added multi threshold * Update pkg/types/ruletypes/api_params.go Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com> * feat(multiple-threshold): added multiple thresholds * Update pkg/types/ruletypes/alerting.go Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com> * feat(multiple-threshold): added multiple thresholds * feat(cumulative-window): added cumulative window * feat(multi-threshold): added recovery min points * Update pkg/query-service/rules/threshold_rule.go Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com> * feat(multi-threshold): fixed log lines * feat(multi-threshold): added severity as threshold name * feat(cumulative-window): added cumulative window for alerts v2 * feat(multi-threshold): removed break to send multi threshold alerts * feat(multi-threshold): removed break to send multi threshold alerts * 
feat(cumulative-window): segregated json marshalling with evaluation logic * feat(multi-threshold): corrected the test cases * feat(cumulative-window): segregated json marshalling and evaluation logic * feat(cumulative-window): segregated json marshalling and evaluation logic * feat(multi-threshold): added segregation on json marshalling and actual threhsold logic * feat(multi-threshold): added segregation on json marshalling and actual threhsold logic * feat(cumulative-window): segregated json marshalling and evaluation logic * feat(multi-threshold): added segregation on json marshalling and actual threhsold logic * feat(cumulative-window): segregated json marshalling and evaluation logic * feat(multi-threhsold): added error wrapper * feat(multi-threhsold): added error wrapper * feat(cumulative-window): segregated json marshalling and evaluation logic * feat(multi-threhsold): added error wrapper * Update pkg/types/ruletypes/threshold.go Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com> * feat(cumulative-window): segregated json marshalling and evaluation logic * feat(multi-threshold): added validation and error propagation * feat(multi-notification): removed pre defined labels from links of log and traces * feat(multi-notification): removed pre defined labels from links of log and traces * feat(multi-threshold): added json parser for gettable rule * feat(multi-threshold): added json parser for gettable rule * feat(multi-threshold): added json parser for gettable rule * feat(multi-threshold): added umnarshaller for postable rule * feat(multi-threshold): added umnarshaller for postable rule * feat(cumulative-window): added validation check * Update pkg/types/ruletypes/evaluation.go Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com> * feat(multi-threhsold): removed yaml support for alerts * Update pkg/types/ruletypes/evaluation.go Co-authored-by: Srikanth Chekuri * Update 
pkg/types/ruletypes/evaluation.go Co-authored-by: Srikanth Chekuri * chore(cumulative-window): renamed funcitons * chore(cumulative-window): removed naked errors * chore(cumulative-window): added reset boundary condition tests * chore(cumulative-window): added reset boundary condition tests * chore(cumulative-window): sorted imports * chore(cumulative-window): sorted imports * chore(cumulative-window): sorted imports * chore(cumulative-window): removed error from next window for * chore(cumulative-window): removed error from next window for * chore(cumulative-window): added case for timezone * chore(cumulative-window): added validation for eval window * chore(cumulative-window): updated api structure for cumulative window * chore(cumulative-window): updated schedule enum --------- Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com> Co-authored-by: Srikanth Chekuri --- ee/query-service/rules/anomaly.go | 13 +- ee/query-service/rules/manager.go | 12 +- pkg/query-service/rules/base_rule.go | 14 +- pkg/query-service/rules/manager.go | 17 +- pkg/query-service/rules/prom_rule.go | 3 +- pkg/query-service/rules/promrule_test.go | 12 +- .../rules/threshold_rule_test.go | 168 ++-- pkg/types/ruletypes/api_params.go | 5 + pkg/types/ruletypes/evaluation.go | 287 ++++++ pkg/types/ruletypes/evaluation_test.go | 878 ++++++++++++++++++ 10 files changed, 1311 insertions(+), 98 deletions(-) create mode 100644 pkg/types/ruletypes/evaluation.go create mode 100644 pkg/types/ruletypes/evaluation_test.go diff --git a/ee/query-service/rules/anomaly.go b/ee/query-service/rules/anomaly.go index ff0aa40be8d8..2ac3b56cb949 100644 --- a/ee/query-service/rules/anomaly.go +++ b/ee/query-service/rules/anomaly.go @@ -166,16 +166,9 @@ func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3. 
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(), ) - start := ts.Add(-time.Duration(r.EvalWindow())).UnixMilli() - end := ts.UnixMilli() - - if r.EvalDelay() > 0 { - start = start - int64(r.EvalDelay().Milliseconds()) - end = end - int64(r.EvalDelay().Milliseconds()) - } - // round to minute otherwise we could potentially miss data - start = start - (start % (60 * 1000)) - end = end - (end % (60 * 1000)) + st, en := r.Timestamps(ts) + start := st.UnixMilli() + end := en.UnixMilli() compositeQuery := r.Condition().CompositeQuery diff --git a/ee/query-service/rules/manager.go b/ee/query-service/rules/manager.go index bf5cbbbec117..3212031f9f3f 100644 --- a/ee/query-service/rules/manager.go +++ b/ee/query-service/rules/manager.go @@ -3,8 +3,10 @@ package rules import ( "context" "fmt" + "time" + "github.com/SigNoz/signoz/pkg/errors" basemodel "github.com/SigNoz/signoz/pkg/query-service/model" baserules "github.com/SigNoz/signoz/pkg/query-service/rules" "github.com/SigNoz/signoz/pkg/query-service/utils/labels" @@ -20,6 +22,10 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) var task baserules.Task ruleId := baserules.RuleIdFromTaskName(opts.TaskName) + evaluation, err := opts.Rule.Evaluation.GetEvaluation() + if err != nil { + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "evaluation is invalid: %v", err) + } if opts.Rule.RuleType == ruletypes.RuleTypeThreshold { // create a threshold rule tr, err := baserules.NewThresholdRule( @@ -40,7 +46,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) rules = append(rules, tr) // create ch rule task for evalution - task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID) + task = newTask(baserules.TaskTypeCh, opts.TaskName, 
time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID) } else if opts.Rule.RuleType == ruletypes.RuleTypeProm { @@ -62,7 +68,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) rules = append(rules, pr) // create promql rule task for evalution - task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID) + task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID) } else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly { // create anomaly rule @@ -84,7 +90,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) rules = append(rules, ar) // create anomaly rule task for evalution - task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID) + task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID) } else { return nil, fmt.Errorf("unsupported rule type %s. 
Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold) diff --git a/pkg/query-service/rules/base_rule.go b/pkg/query-service/rules/base_rule.go index be14b9133f9b..a0ddcbf8444d 100644 --- a/pkg/query-service/rules/base_rule.go +++ b/pkg/query-service/rules/base_rule.go @@ -9,6 +9,7 @@ import ( "sync" "time" + "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/query-service/converter" "github.com/SigNoz/signoz/pkg/query-service/interfaces" "github.com/SigNoz/signoz/pkg/query-service/model" @@ -87,6 +88,8 @@ type BaseRule struct { TemporalityMap map[string]map[v3.Temporality]bool sqlstore sqlstore.SQLStore + + evaluation ruletypes.Evaluation } type RuleOption func(*BaseRule) @@ -129,6 +132,10 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader if err != nil { return nil, err } + evaluation, err := p.Evaluation.GetEvaluation() + if err != nil { + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to get evaluation: %v", err) + } baseRule := &BaseRule{ id: id, @@ -146,6 +153,7 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader reader: reader, TemporalityMap: make(map[string]map[v3.Temporality]bool), Threshold: threshold, + evaluation: evaluation, } if baseRule.evalWindow == 0 { @@ -248,8 +256,10 @@ func (r *BaseRule) Unit() string { } func (r *BaseRule) Timestamps(ts time.Time) (time.Time, time.Time) { - start := ts.Add(-time.Duration(r.evalWindow)).UnixMilli() - end := ts.UnixMilli() + + st, en := r.evaluation.NextWindowFor(ts) + start := st.UnixMilli() + end := en.UnixMilli() if r.evalDelay > 0 { start = start - int64(r.evalDelay.Milliseconds()) diff --git a/pkg/query-service/rules/manager.go b/pkg/query-service/rules/manager.go index f80686682687..5264b28f85ff 100644 --- a/pkg/query-service/rules/manager.go +++ b/pkg/query-service/rules/manager.go @@ -12,12 +12,11 @@ import ( "go.uber.org/zap" - "errors" - 
"github.com/go-openapi/strfmt" "github.com/SigNoz/signoz/pkg/alertmanager" "github.com/SigNoz/signoz/pkg/cache" + "github.com/SigNoz/signoz/pkg/errors" "github.com/SigNoz/signoz/pkg/modules/organization" "github.com/SigNoz/signoz/pkg/prometheus" querierV5 "github.com/SigNoz/signoz/pkg/querier" @@ -147,6 +146,12 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) { var task Task ruleId := RuleIdFromTaskName(opts.TaskName) + + evaluation, err := opts.Rule.Evaluation.GetEvaluation() + if err != nil { + return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "evaluation is invalid: %v", err) + } + if opts.Rule.RuleType == ruletypes.RuleTypeThreshold { // create a threshold rule tr, err := NewThresholdRule( @@ -167,7 +172,7 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) { rules = append(rules, tr) // create ch rule task for evalution - task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID) + task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID) } else if opts.Rule.RuleType == ruletypes.RuleTypeProm { @@ -189,7 +194,7 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) { rules = append(rules, pr) // create promql rule task for evalution - task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID) + task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID) } else { return nil, fmt.Errorf("unsupported rule type %s. 
Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold) @@ -400,7 +405,7 @@ func (m *Manager) editTask(_ context.Context, orgID valuer.UUID, rule *ruletypes if err != nil { zap.L().Error("loading tasks failed", zap.Error(err)) - return errors.New("error preparing rule with given parameters, previous rule set restored") + return errors.NewInvalidInputf(errors.CodeInvalidInput, "error preparing rule with given parameters, previous rule set restored") } for _, r := range newTask.Rules() { @@ -593,7 +598,7 @@ func (m *Manager) addTask(_ context.Context, orgID valuer.UUID, rule *ruletypes. if err != nil { zap.L().Error("creating rule task failed", zap.String("name", taskName), zap.Error(err)) - return errors.New("error loading rules, previous rule set restored") + return errors.NewInvalidInputf(errors.CodeInvalidInput, "error loading rules, previous rule set restored") } for _, r := range newTask.Rules() { diff --git a/pkg/query-service/rules/prom_rule.go b/pkg/query-service/rules/prom_rule.go index ea07f85e04b7..773c86a2368b 100644 --- a/pkg/query-service/rules/prom_rule.go +++ b/pkg/query-service/rules/prom_rule.go @@ -123,8 +123,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (interface{}, error) prevState := r.State() - start := ts.Add(-r.evalWindow) - end := ts + start, end := r.Timestamps(ts) interval := 60 * time.Second // TODO(srikanthccv): this should be configurable valueFormatter := formatter.FromUnit(r.Unit()) diff --git a/pkg/query-service/rules/promrule_test.go b/pkg/query-service/rules/promrule_test.go index a4e0b94d06a9..17177de622c9 100644 --- a/pkg/query-service/rules/promrule_test.go +++ b/pkg/query-service/rules/promrule_test.go @@ -25,11 +25,13 @@ func getVectorValues(vectors []ruletypes.Sample) []float64 { func TestPromRuleShouldAlert(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Test Rule", - AlertType: ruletypes.AlertTypeMetric, - RuleType: ruletypes.RuleTypeProm, 
- EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Test Rule", + AlertType: ruletypes.AlertTypeMetric, + RuleType: ruletypes.RuleTypeProm, + Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypePromQL, diff --git a/pkg/query-service/rules/threshold_rule_test.go b/pkg/query-service/rules/threshold_rule_test.go index d6bc92c8ab44..d311a47e186e 100644 --- a/pkg/query-service/rules/threshold_rule_test.go +++ b/pkg/query-service/rules/threshold_rule_test.go @@ -31,11 +31,13 @@ import ( func TestThresholdRuleShouldAlert(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Tricky Condition Tests", - AlertType: ruletypes.AlertTypeMetric, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Tricky Condition Tests", + AlertType: ruletypes.AlertTypeMetric, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, @@ -886,11 +888,13 @@ func TestNormalizeLabelName(t *testing.T) { func TestPrepareLinksToLogs(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Tricky Condition Tests", - AlertType: ruletypes.AlertTypeLogs, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Tricky Condition Tests", + AlertType: 
ruletypes.AlertTypeLogs, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, @@ -938,11 +942,13 @@ func TestPrepareLinksToLogs(t *testing.T) { func TestPrepareLinksToLogsV5(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Tricky Condition Tests", - AlertType: ruletypes.AlertTypeLogs, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Tricky Condition Tests", + AlertType: ruletypes.AlertTypeLogs, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, @@ -997,11 +1003,13 @@ func TestPrepareLinksToLogsV5(t *testing.T) { func TestPrepareLinksToTracesV5(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Tricky Condition Tests", - AlertType: ruletypes.AlertTypeTraces, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Tricky Condition Tests", + AlertType: ruletypes.AlertTypeTraces, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, @@ 
-1056,11 +1064,13 @@ func TestPrepareLinksToTracesV5(t *testing.T) { func TestPrepareLinksToTraces(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Links to traces test", - AlertType: ruletypes.AlertTypeTraces, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Links to traces test", + AlertType: ruletypes.AlertTypeTraces, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, @@ -1108,11 +1118,13 @@ func TestPrepareLinksToTraces(t *testing.T) { func TestThresholdRuleLabelNormalization(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Tricky Condition Tests", - AlertType: ruletypes.AlertTypeMetric, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Tricky Condition Tests", + AlertType: ruletypes.AlertTypeMetric, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, @@ -1214,11 +1226,13 @@ func TestThresholdRuleLabelNormalization(t *testing.T) { func TestThresholdRuleEvalDelay(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Test Eval Delay", - AlertType: ruletypes.AlertTypeMetric, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * 
time.Minute), + AlertName: "Test Eval Delay", + AlertType: ruletypes.AlertTypeMetric, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeClickHouseSQL, @@ -1275,11 +1289,13 @@ func TestThresholdRuleEvalDelay(t *testing.T) { func TestThresholdRuleClickHouseTmpl(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Tricky Condition Tests", - AlertType: ruletypes.AlertTypeMetric, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Tricky Condition Tests", + AlertType: ruletypes.AlertTypeMetric, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeClickHouseSQL, @@ -1342,11 +1358,13 @@ func (m *queryMatcherAny) Match(x string, y string) error { func TestThresholdRuleUnitCombinations(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Units test", - AlertType: ruletypes.AlertTypeMetric, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Units test", + AlertType: ruletypes.AlertTypeMetric, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: 
&ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, @@ -1535,11 +1553,13 @@ func TestThresholdRuleUnitCombinations(t *testing.T) { func TestThresholdRuleNoData(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "No data test", - AlertType: ruletypes.AlertTypeMetric, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "No data test", + AlertType: ruletypes.AlertTypeMetric, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, @@ -1638,11 +1658,13 @@ func TestThresholdRuleNoData(t *testing.T) { func TestThresholdRuleTracesLink(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Traces link test", - AlertType: ruletypes.AlertTypeTraces, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Traces link test", + AlertType: ruletypes.AlertTypeTraces, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, @@ -1763,11 +1785,13 @@ func TestThresholdRuleTracesLink(t *testing.T) { func TestThresholdRuleLogsLink(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Logs link test", - AlertType: ruletypes.AlertTypeLogs, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 
* time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Logs link test", + AlertType: ruletypes.AlertTypeLogs, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, @@ -1901,11 +1925,13 @@ func TestThresholdRuleLogsLink(t *testing.T) { func TestThresholdRuleShiftBy(t *testing.T) { target := float64(10) postableRule := ruletypes.PostableRule{ - AlertName: "Logs link test", - AlertType: ruletypes.AlertTypeLogs, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Logs link test", + AlertType: ruletypes.AlertTypeLogs, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ Thresholds: &ruletypes.RuleThresholdData{ Kind: ruletypes.BasicThresholdKind, @@ -1973,11 +1999,13 @@ func TestThresholdRuleShiftBy(t *testing.T) { func TestMultipleThresholdRule(t *testing.T) { postableRule := ruletypes.PostableRule{ - AlertName: "Mulitple threshold test", - AlertType: ruletypes.AlertTypeMetric, - RuleType: ruletypes.RuleTypeThreshold, - EvalWindow: ruletypes.Duration(5 * time.Minute), - Frequency: ruletypes.Duration(1 * time.Minute), + AlertName: "Mulitple threshold test", + AlertType: ruletypes.AlertTypeMetric, + RuleType: ruletypes.RuleTypeThreshold, + Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{ + EvalWindow: ruletypes.Duration(5 * time.Minute), + Frequency: ruletypes.Duration(1 * 
time.Minute), + }}, RuleCondition: &ruletypes.RuleCondition{ CompositeQuery: &v3.CompositeQuery{ QueryType: v3.QueryTypeBuilder, diff --git a/pkg/types/ruletypes/api_params.go b/pkg/types/ruletypes/api_params.go index f4ad6b55cd2b..9285b070fbe6 100644 --- a/pkg/types/ruletypes/api_params.go +++ b/pkg/types/ruletypes/api_params.go @@ -50,6 +50,8 @@ type PostableRule struct { PreferredChannels []string `json:"preferredChannels,omitempty"` Version string `json:"version,omitempty"` + + Evaluation *EvaluationEnvelope `yaml:"evaluation,omitempty" json:"evaluation,omitempty"` } func (r *PostableRule) processRuleDefaults() error { @@ -98,6 +100,9 @@ func (r *PostableRule) processRuleDefaults() error { r.RuleCondition.Thresholds = &thresholdData } } + if r.Evaluation == nil { + r.Evaluation = &EvaluationEnvelope{RollingEvaluation, RollingWindow{EvalWindow: r.EvalWindow, Frequency: r.Frequency}} + } return r.Validate() } diff --git a/pkg/types/ruletypes/evaluation.go b/pkg/types/ruletypes/evaluation.go new file mode 100644 index 000000000000..7677bbbbcbab --- /dev/null +++ b/pkg/types/ruletypes/evaluation.go @@ -0,0 +1,287 @@ +package ruletypes + +import ( + "encoding/json" + "time" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/valuer" +) + +type EvaluationKind struct { + valuer.String +} + +var ( + RollingEvaluation = EvaluationKind{valuer.NewString("rolling")} + CumulativeEvaluation = EvaluationKind{valuer.NewString("cumulative")} +) + +type Evaluation interface { + NextWindowFor(curr time.Time) (time.Time, time.Time) + GetFrequency() Duration +} + +type RollingWindow struct { + EvalWindow Duration `json:"evalWindow"` + Frequency Duration `json:"frequency"` +} + +func (rollingWindow RollingWindow) Validate() error { + if rollingWindow.EvalWindow <= 0 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "evalWindow must be greater than zero") + } + if rollingWindow.Frequency <= 0 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, 
"frequency must be greater than zero") + } + return nil +} + +func (rollingWindow RollingWindow) NextWindowFor(curr time.Time) (time.Time, time.Time) { + return curr.Add(time.Duration(-rollingWindow.EvalWindow)), curr +} + +func (rollingWindow RollingWindow) GetFrequency() Duration { + return rollingWindow.Frequency +} + +type CumulativeWindow struct { + Schedule CumulativeSchedule `json:"schedule"` + Frequency Duration `json:"frequency"` + Timezone string `json:"timezone"` +} + +type CumulativeSchedule struct { + Type ScheduleType `json:"type"` + Minute *int `json:"minute,omitempty"` // 0-59, for all types + Hour *int `json:"hour,omitempty"` // 0-23, for daily/weekly/monthly + Day *int `json:"day,omitempty"` // 1-31, for monthly + Weekday *int `json:"weekday,omitempty"` // 0-6 (Sunday=0), for weekly +} + +type ScheduleType struct { + valuer.String +} + +var ( + ScheduleTypeHourly = ScheduleType{valuer.NewString("hourly")} + ScheduleTypeDaily = ScheduleType{valuer.NewString("daily")} + ScheduleTypeWeekly = ScheduleType{valuer.NewString("weekly")} + ScheduleTypeMonthly = ScheduleType{valuer.NewString("monthly")} +) + +func (cumulativeWindow CumulativeWindow) Validate() error { + // Validate schedule + if err := cumulativeWindow.Schedule.Validate(); err != nil { + return err + } + + if _, err := time.LoadLocation(cumulativeWindow.Timezone); err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "timezone is invalid") + } + if cumulativeWindow.Frequency <= 0 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "frequency must be greater than zero") + } + return nil +} + +func (cs CumulativeSchedule) Validate() error { + switch cs.Type { + case ScheduleTypeHourly: + if cs.Minute == nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "minute must be specified for hourly schedule") + } + if *cs.Minute < 0 || *cs.Minute > 59 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "minute must be between 0 and 59") + } + case 
ScheduleTypeDaily: + if cs.Hour == nil || cs.Minute == nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "hour and minute must be specified for daily schedule") + } + if *cs.Hour < 0 || *cs.Hour > 23 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "hour must be between 0 and 23") + } + if *cs.Minute < 0 || *cs.Minute > 59 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "minute must be between 0 and 59") + } + case ScheduleTypeWeekly: + if cs.Weekday == nil || cs.Hour == nil || cs.Minute == nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "weekday, hour and minute must be specified for weekly schedule") + } + if *cs.Weekday < 0 || *cs.Weekday > 6 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "weekday must be between 0 and 6 (Sunday=0)") + } + if *cs.Hour < 0 || *cs.Hour > 23 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "hour must be between 0 and 23") + } + if *cs.Minute < 0 || *cs.Minute > 59 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "minute must be between 0 and 59") + } + case ScheduleTypeMonthly: + if cs.Day == nil || cs.Hour == nil || cs.Minute == nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "day, hour and minute must be specified for monthly schedule") + } + if *cs.Day < 1 || *cs.Day > 31 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "day must be between 1 and 31") + } + if *cs.Hour < 0 || *cs.Hour > 23 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "hour must be between 0 and 23") + } + if *cs.Minute < 0 || *cs.Minute > 59 { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "minute must be between 0 and 59") + } + default: + return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid schedule type") + } + return nil +} + +func (cumulativeWindow CumulativeWindow) NextWindowFor(curr time.Time) (time.Time, time.Time) { + loc := time.UTC + if cumulativeWindow.Timezone != "" { + if tz, err := 
time.LoadLocation(cumulativeWindow.Timezone); err == nil { + loc = tz + } + } + + currInTZ := curr.In(loc) + windowStart := cumulativeWindow.getLastScheduleTime(currInTZ, loc) + + return windowStart.In(time.UTC), currInTZ.In(time.UTC) +} + +func (cw CumulativeWindow) getLastScheduleTime(curr time.Time, loc *time.Location) time.Time { + schedule := cw.Schedule + + switch schedule.Type { + case ScheduleTypeHourly: + // Find the most recent hour boundary with the specified minute + minute := *schedule.Minute + candidate := time.Date(curr.Year(), curr.Month(), curr.Day(), curr.Hour(), minute, 0, 0, loc) + if candidate.After(curr) { + candidate = candidate.Add(-time.Hour) + } + return candidate + + case ScheduleTypeDaily: + // Find the most recent day boundary with the specified hour and minute + hour := *schedule.Hour + minute := *schedule.Minute + candidate := time.Date(curr.Year(), curr.Month(), curr.Day(), hour, minute, 0, 0, loc) + if candidate.After(curr) { + candidate = candidate.AddDate(0, 0, -1) + } + return candidate + + case ScheduleTypeWeekly: + weekday := time.Weekday(*schedule.Weekday) + hour := *schedule.Hour + minute := *schedule.Minute + + // Calculate days to subtract to reach the target weekday + daysBack := int(curr.Weekday() - weekday) + if daysBack < 0 { + daysBack += 7 + } + + candidate := time.Date(curr.Year(), curr.Month(), curr.Day(), hour, minute, 0, 0, loc).AddDate(0, 0, -daysBack) + if candidate.After(curr) { + candidate = candidate.AddDate(0, 0, -7) + } + return candidate + + case ScheduleTypeMonthly: + // Find the most recent month boundary with the specified day, hour and minute + targetDay := *schedule.Day + hour := *schedule.Hour + minute := *schedule.Minute + + // Try current month first + lastDayOfCurrentMonth := time.Date(curr.Year(), curr.Month()+1, 0, 0, 0, 0, 0, loc).Day() + dayInCurrentMonth := targetDay + if targetDay > lastDayOfCurrentMonth { + dayInCurrentMonth = lastDayOfCurrentMonth + } + + candidate := 
time.Date(curr.Year(), curr.Month(), dayInCurrentMonth, hour, minute, 0, 0, loc) + if candidate.After(curr) { + prevMonth := curr.AddDate(0, -1, 0) + lastDayOfPrevMonth := time.Date(prevMonth.Year(), prevMonth.Month()+1, 0, 0, 0, 0, 0, loc).Day() + dayInPrevMonth := targetDay + if targetDay > lastDayOfPrevMonth { + dayInPrevMonth = lastDayOfPrevMonth + } + candidate = time.Date(prevMonth.Year(), prevMonth.Month(), dayInPrevMonth, hour, minute, 0, 0, loc) + } + return candidate + + default: + return curr + } +} + +func (cumulativeWindow CumulativeWindow) GetFrequency() Duration { + return cumulativeWindow.Frequency +} + +type EvaluationEnvelope struct { + Kind EvaluationKind `json:"kind"` + Spec any `json:"spec"` +} + +func (e *EvaluationEnvelope) UnmarshalJSON(data []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to unmarshal evaluation: %v", err) + } + if err := json.Unmarshal(raw["kind"], &e.Kind); err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to unmarshal evaluation kind: %v", err) + } + switch e.Kind { + case RollingEvaluation: + var rollingWindow RollingWindow + if err := json.Unmarshal(raw["spec"], &rollingWindow); err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to unmarshal rolling window: %v", err) + } + err := rollingWindow.Validate() + if err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to validate rolling window: %v", err) + } + e.Spec = rollingWindow + case CumulativeEvaluation: + var cumulativeWindow CumulativeWindow + if err := json.Unmarshal(raw["spec"], &cumulativeWindow); err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to unmarshal cumulative window: %v", err) + } + err := cumulativeWindow.Validate() + if err != nil { + return errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to validate cumulative 
window: %v", err) + } + e.Spec = cumulativeWindow + + default: + return errors.NewInvalidInputf(errors.CodeUnsupported, "unknown evaluation kind") + } + + return nil +} + +func (e *EvaluationEnvelope) GetEvaluation() (Evaluation, error) { + if e.Kind.IsZero() { + e.Kind = RollingEvaluation + } + + switch e.Kind { + case RollingEvaluation: + if rolling, ok := e.Spec.(RollingWindow); ok { + return rolling, nil + } + case CumulativeEvaluation: + if cumulative, ok := e.Spec.(CumulativeWindow); ok { + return cumulative, nil + } + default: + return nil, errors.NewInvalidInputf(errors.CodeUnsupported, "unknown evaluation kind") + } + return nil, errors.NewInvalidInputf(errors.CodeUnsupported, "unknown evaluation kind") +} diff --git a/pkg/types/ruletypes/evaluation_test.go b/pkg/types/ruletypes/evaluation_test.go new file mode 100644 index 000000000000..aded4c10e04a --- /dev/null +++ b/pkg/types/ruletypes/evaluation_test.go @@ -0,0 +1,878 @@ +package ruletypes + +import ( + "encoding/json" + "testing" + "time" +) + +func TestRollingWindow_EvaluationTime(t *testing.T) { + tests := []struct { + name string + evalWindow Duration + current time.Time + wantStart time.Time + wantEnd time.Time + }{ + { + name: "5 minute rolling window", + evalWindow: Duration(5 * time.Minute), + current: time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC), + wantStart: time.Date(2023, 12, 1, 12, 25, 0, 0, time.UTC), + wantEnd: time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC), + }, + { + name: "1 hour rolling window", + evalWindow: Duration(1 * time.Hour), + current: time.Date(2023, 12, 1, 15, 45, 30, 0, time.UTC), + wantStart: time.Date(2023, 12, 1, 14, 45, 30, 0, time.UTC), + wantEnd: time.Date(2023, 12, 1, 15, 45, 30, 0, time.UTC), + }, + { + name: "30 second rolling window", + evalWindow: Duration(30 * time.Second), + current: time.Date(2023, 12, 1, 12, 30, 15, 0, time.UTC), + wantStart: time.Date(2023, 12, 1, 12, 29, 45, 0, time.UTC), + wantEnd: time.Date(2023, 12, 1, 12, 30, 15, 0, time.UTC), + }, 
+ } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + rw := &RollingWindow{ + EvalWindow: tt.evalWindow, + Frequency: Duration(1 * time.Minute), + } + + gotStart, gotEnd := rw.NextWindowFor(tt.current) + if !gotStart.Equal(tt.wantStart) { + t.Errorf("RollingWindow.NextWindowFor() start time = %v, want %v", gotStart, tt.wantStart) + } + if !gotEnd.Equal(tt.wantEnd) { + t.Errorf("RollingWindow.NextWindowFor() end time = %v, want %v", gotEnd, tt.wantEnd) + } + }) + } +} + +func TestCumulativeWindow_NewScheduleSystem(t *testing.T) { + tests := []struct { + name string + window CumulativeWindow + current time.Time + wantErr bool + }{ + { + name: "hourly schedule - minute 15", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(15), + }, + Frequency: Duration(5 * time.Minute), + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC), + wantErr: false, + }, + { + name: "daily schedule - 9:30 AM IST", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(9), + Minute: intPtr(30), + }, + Frequency: Duration(1 * time.Hour), + Timezone: "Asia/Kolkata", + }, + current: time.Date(2025, 3, 15, 15, 30, 0, 0, time.UTC), + wantErr: false, + }, + { + name: "weekly schedule - Monday 2:00 PM", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeWeekly, + Weekday: intPtr(1), // Monday + Hour: intPtr(14), + Minute: intPtr(0), + }, + Frequency: Duration(24 * time.Hour), + Timezone: "America/New_York", + }, + current: time.Date(2025, 3, 18, 19, 0, 0, 0, time.UTC), // Tuesday + wantErr: false, + }, + { + name: "monthly schedule - 1st at midnight", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(1), + Hour: intPtr(0), + Minute: intPtr(0), + }, + Frequency: Duration(24 * time.Hour), + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 12, 0, 0, 0, time.UTC), + 
wantErr: false, + }, + { + name: "invalid schedule - missing minute for hourly", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + }, + Frequency: Duration(5 * time.Minute), + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC), + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Test validation + err := tt.window.Validate() + if (err != nil) != tt.wantErr { + t.Errorf("CumulativeWindow.Validate() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !tt.wantErr { + // Test NextWindowFor + start, end := tt.window.NextWindowFor(tt.current) + + // Basic validation + if start.After(end) { + t.Errorf("Window start should not be after end: start=%v, end=%v", start, end) + } + + if end.After(tt.current) { + t.Errorf("Window end should not be after current time: end=%v, current=%v", end, tt.current) + } + } + }) + } +} + +func intPtr(i int) *int { + return &i +} + +func TestCumulativeWindow_NextWindowFor(t *testing.T) { + tests := []struct { + name string + window CumulativeWindow + current time.Time + wantStart time.Time + wantEnd time.Time + }{ + // Hourly schedule tests + { + name: "hourly - current at exact minute", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(30), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC), + wantStart: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC), + wantEnd: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC), + }, + { + name: "hourly - current after scheduled minute", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(15), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 14, 45, 0, 0, time.UTC), + wantStart: time.Date(2025, 3, 15, 14, 15, 0, 0, time.UTC), + wantEnd: time.Date(2025, 3, 15, 14, 45, 0, 0, time.UTC), + }, + { + name: "hourly - current before 
scheduled minute", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(30), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 14, 15, 0, 0, time.UTC), + wantStart: time.Date(2025, 3, 15, 13, 30, 0, 0, time.UTC), // Previous hour + wantEnd: time.Date(2025, 3, 15, 14, 15, 0, 0, time.UTC), + }, + { + name: "hourly - current before scheduled minute", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(30), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 13, 14, 0, 0, time.UTC), + wantStart: time.Date(2025, 3, 15, 12, 30, 0, 0, time.UTC), // Previous hour + wantEnd: time.Date(2025, 3, 15, 13, 14, 0, 0, time.UTC), + }, + { + name: "hourly - current before scheduled minute", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(30), + }, + Timezone: "Asia/Kolkata", + }, + current: time.Date(2025, 3, 15, 13, 14, 0, 0, time.UTC), + wantStart: time.Date(2025, 3, 15, 13, 00, 0, 0, time.UTC), // Previous hour + wantEnd: time.Date(2025, 3, 15, 13, 14, 0, 0, time.UTC), + }, + + // Daily schedule tests + { + name: "daily - current at exact time", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(9), + Minute: intPtr(30), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 9, 30, 0, 0, time.UTC), + wantStart: time.Date(2025, 3, 15, 9, 30, 0, 0, time.UTC), + wantEnd: time.Date(2025, 3, 15, 9, 30, 0, 0, time.UTC), + }, + { + name: "daily - current after scheduled time", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(9), + Minute: intPtr(30), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 15, 45, 0, 0, time.UTC), + wantStart: time.Date(2025, 3, 15, 9, 30, 0, 0, time.UTC), + wantEnd: time.Date(2025, 3, 15, 15, 45, 0, 0, time.UTC), + }, + { + name: "daily - current before 
scheduled time", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(9), + Minute: intPtr(30), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 8, 15, 0, 0, time.UTC), + wantStart: time.Date(2025, 3, 14, 9, 30, 0, 0, time.UTC), // Previous day + wantEnd: time.Date(2025, 3, 15, 8, 15, 0, 0, time.UTC), + }, + { + name: "daily - with timezone IST", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(9), + Minute: intPtr(30), + }, + Timezone: "Asia/Kolkata", + }, + current: time.Date(2025, 3, 15, 15, 30, 0, 0, time.UTC), // 9:00 PM IST + wantStart: time.Date(2025, 3, 15, 4, 0, 0, 0, time.UTC), // 9:30 AM IST in UTC + wantEnd: time.Date(2025, 3, 15, 15, 30, 0, 0, time.UTC), + }, + + // Weekly schedule tests + { + name: "weekly - current on scheduled day at exact time", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeWeekly, + Weekday: intPtr(1), // Monday + Hour: intPtr(14), + Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 17, 14, 0, 0, 0, time.UTC), // Monday + wantStart: time.Date(2025, 3, 17, 14, 0, 0, 0, time.UTC), + wantEnd: time.Date(2025, 3, 17, 14, 0, 0, 0, time.UTC), + }, + { + name: "weekly - current on different day", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeWeekly, + Weekday: intPtr(1), // Monday + Hour: intPtr(14), + Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 19, 10, 30, 0, 0, time.UTC), // Wednesday + wantStart: time.Date(2025, 3, 17, 14, 0, 0, 0, time.UTC), // Previous Monday + wantEnd: time.Date(2025, 3, 19, 10, 30, 0, 0, time.UTC), + }, + { + name: "weekly - current before scheduled time on same day", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeWeekly, + Weekday: intPtr(2), // Tuesday + Hour: intPtr(14), + Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: 
time.Date(2025, 3, 18, 10, 0, 0, 0, time.UTC), // Tuesday before 2 PM + wantStart: time.Date(2025, 3, 11, 14, 0, 0, 0, time.UTC), // Previous Tuesday + wantEnd: time.Date(2025, 3, 18, 10, 0, 0, 0, time.UTC), + }, + + // Monthly schedule tests + { + name: "monthly - current on scheduled day at exact time", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(15), + Hour: intPtr(12), + Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 12, 0, 0, 0, time.UTC), + wantStart: time.Date(2025, 3, 15, 12, 0, 0, 0, time.UTC), + wantEnd: time.Date(2025, 3, 15, 12, 0, 0, 0, time.UTC), + }, + { + name: "monthly - current after scheduled time", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(1), + Hour: intPtr(0), + Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 16, 30, 0, 0, time.UTC), + wantStart: time.Date(2025, 3, 1, 0, 0, 0, 0, time.UTC), + wantEnd: time.Date(2025, 3, 15, 16, 30, 0, 0, time.UTC), + }, + { + name: "monthly - current before scheduled day", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(15), + Hour: intPtr(12), + Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 10, 10, 0, 0, 0, time.UTC), + wantStart: time.Date(2025, 2, 15, 12, 0, 0, 0, time.UTC), // Previous month + wantEnd: time.Date(2025, 3, 10, 10, 0, 0, 0, time.UTC), + }, + { + name: "monthly - day 31 in february (edge case)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(31), + Hour: intPtr(12), + Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: time.Date(2025, 3, 15, 10, 0, 0, 0, time.UTC), + wantStart: time.Date(2025, 2, 28, 12, 0, 0, 0, time.UTC), // Feb 28 (last day of Feb) + wantEnd: time.Date(2025, 3, 15, 10, 0, 0, 0, time.UTC), + }, + + // Comprehensive timezone-based test cases + 
{ + name: "Asia/Tokyo timezone - hourly schedule", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(45), + }, + Timezone: "Asia/Tokyo", + }, + current: time.Date(2023, 12, 15, 2, 30, 0, 0, time.UTC), // 11:30 AM JST + wantStart: time.Date(2023, 12, 15, 1, 45, 0, 0, time.UTC), // 10:45 AM JST in UTC + wantEnd: time.Date(2023, 12, 15, 2, 30, 0, 0, time.UTC), + }, + { + name: "America/New_York timezone - daily schedule (EST)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(8), // 8 AM EST + Minute: intPtr(0), + }, + Timezone: "America/New_York", + }, + current: time.Date(2023, 12, 15, 20, 30, 0, 0, time.UTC), // 3:30 PM EST + wantStart: time.Date(2023, 12, 15, 13, 0, 0, 0, time.UTC), // 8 AM EST in UTC + wantEnd: time.Date(2023, 12, 15, 20, 30, 0, 0, time.UTC), + }, + { + name: "Europe/London timezone - weekly schedule (GMT)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeWeekly, + Weekday: intPtr(1), // Monday + Hour: intPtr(12), + Minute: intPtr(0), + }, + Timezone: "Europe/London", + }, + current: time.Date(2023, 12, 15, 15, 0, 0, 0, time.UTC), // Friday 3 PM GMT + wantStart: time.Date(2023, 12, 11, 12, 0, 0, 0, time.UTC), // Previous Monday 12 PM GMT + wantEnd: time.Date(2023, 12, 15, 15, 0, 0, 0, time.UTC), + }, + { + name: "Australia/Sydney timezone - monthly schedule (AEDT)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(1), + Hour: intPtr(0), // Midnight AEDT + Minute: intPtr(0), + }, + Timezone: "Australia/Sydney", + }, + current: time.Date(2023, 12, 15, 5, 0, 0, 0, time.UTC), // 4 PM AEDT on 15th + wantStart: time.Date(2023, 11, 30, 13, 0, 0, 0, time.UTC), // Midnight AEDT on Dec 1st in UTC (Nov 30 13:00 UTC) + wantEnd: time.Date(2023, 12, 15, 5, 0, 0, 0, time.UTC), + }, + { + name: "Pacific/Honolulu timezone - hourly schedule (HST)", + window: 
CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(30), + }, + Timezone: "Pacific/Honolulu", + }, + current: time.Date(2023, 12, 15, 22, 45, 0, 0, time.UTC), // 12:45 PM HST + wantStart: time.Date(2023, 12, 15, 22, 30, 0, 0, time.UTC), // 12:30 PM HST in UTC + wantEnd: time.Date(2023, 12, 15, 22, 45, 0, 0, time.UTC), + }, + { + name: "America/Los_Angeles timezone - DST transition daily", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(2), // 2 AM PST/PDT + Minute: intPtr(0), + }, + Timezone: "America/Los_Angeles", + }, + current: time.Date(2023, 3, 12, 15, 0, 0, 0, time.UTC), // Day after DST starts + wantStart: time.Date(2023, 3, 12, 9, 0, 0, 0, time.UTC), // 2 AM PDT in UTC (PDT = UTC-7) + wantEnd: time.Date(2023, 3, 12, 15, 0, 0, 0, time.UTC), + }, + { + name: "Europe/Berlin timezone - weekly schedule (CET)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeWeekly, + Weekday: intPtr(5), // Friday + Hour: intPtr(16), // 4 PM CET + Minute: intPtr(30), + }, + Timezone: "Europe/Berlin", + }, + current: time.Date(2023, 12, 18, 10, 0, 0, 0, time.UTC), // Monday 11 AM CET + wantStart: time.Date(2023, 12, 15, 15, 30, 0, 0, time.UTC), // Previous Friday 4:30 PM CET + wantEnd: time.Date(2023, 12, 18, 10, 0, 0, 0, time.UTC), + }, + { + name: "Asia/Kolkata timezone - monthly edge case (IST)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(31), // 31st (edge case for Feb) + Hour: intPtr(23), + Minute: intPtr(59), + }, + Timezone: "Asia/Kolkata", + }, + current: time.Date(2023, 3, 10, 12, 0, 0, 0, time.UTC), // March 10th 5:30 PM IST + wantStart: time.Date(2023, 2, 28, 18, 29, 0, 0, time.UTC), // Feb 28 11:59 PM IST (last day of Feb) + wantEnd: time.Date(2023, 3, 10, 12, 0, 0, 0, time.UTC), + }, + { + name: "America/Chicago timezone - hourly across midnight (CST)", + window: CumulativeWindow{ + 
Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(0), // Top of hour + }, + Timezone: "America/Chicago", + }, + current: time.Date(2023, 12, 15, 6, 30, 0, 0, time.UTC), // 12:30 AM CST + wantStart: time.Date(2023, 12, 15, 6, 0, 0, 0, time.UTC), // Midnight CST in UTC + wantEnd: time.Date(2023, 12, 15, 6, 30, 0, 0, time.UTC), + }, + + // Boundary condition test cases + { + name: "boundary - end of year transition (Dec 31 to Jan 1)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(0), + Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC), // Jan 1st noon + wantStart: time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC), // Jan 1st midnight + wantEnd: time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC), + }, + { + name: "boundary - leap year Feb 29th monthly schedule", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(29), + Hour: intPtr(15), + Minute: intPtr(30), + }, + Timezone: "UTC", + }, + current: time.Date(2024, 3, 10, 10, 0, 0, 0, time.UTC), // March 10th (leap year) + wantStart: time.Date(2024, 2, 29, 15, 30, 0, 0, time.UTC), // Feb 29th exists in leap year + wantEnd: time.Date(2024, 3, 10, 10, 0, 0, 0, time.UTC), + }, + { + name: "boundary - non-leap year Feb 29th request (fallback to Feb 28th)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(29), + Hour: intPtr(15), + Minute: intPtr(30), + }, + Timezone: "UTC", + }, + current: time.Date(2023, 3, 10, 10, 0, 0, 0, time.UTC), // March 10th (non-leap year) + wantStart: time.Date(2023, 2, 28, 15, 30, 0, 0, time.UTC), // Feb 28th (fallback) + wantEnd: time.Date(2023, 3, 10, 10, 0, 0, 0, time.UTC), + }, + { + name: "boundary - day 31 in April (30-day month fallback)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(31), + Hour: intPtr(12), 
+ Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: time.Date(2023, 5, 15, 10, 0, 0, 0, time.UTC), // May 15th + wantStart: time.Date(2023, 4, 30, 12, 0, 0, 0, time.UTC), // April 30th (fallback from 31st) + wantEnd: time.Date(2023, 5, 15, 10, 0, 0, 0, time.UTC), + }, + { + name: "boundary - weekly Sunday to Monday transition", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeWeekly, + Weekday: intPtr(0), // Sunday + Hour: intPtr(23), + Minute: intPtr(59), + }, + Timezone: "UTC", + }, + current: time.Date(2023, 12, 11, 1, 0, 0, 0, time.UTC), // Monday 1 AM + wantStart: time.Date(2023, 12, 10, 23, 59, 0, 0, time.UTC), // Previous Sunday 11:59 PM + wantEnd: time.Date(2023, 12, 11, 1, 0, 0, 0, time.UTC), + }, + { + name: "boundary - hourly minute 59 to minute 0 transition", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(59), + }, + Timezone: "UTC", + }, + current: time.Date(2023, 12, 15, 14, 5, 0, 0, time.UTC), // 14:05 + wantStart: time.Date(2023, 12, 15, 13, 59, 0, 0, time.UTC), // 13:59 (previous hour) + wantEnd: time.Date(2023, 12, 15, 14, 5, 0, 0, time.UTC), + }, + { + name: "boundary - DST spring forward (2 AM doesn't exist)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(2), // 2 AM (skipped during DST) + Minute: intPtr(30), + }, + Timezone: "America/New_York", + }, + current: time.Date(2023, 3, 12, 15, 0, 0, 0, time.UTC), // Day DST starts + wantStart: time.Date(2023, 3, 12, 6, 30, 0, 0, time.UTC), // Same day 2:30 AM EDT (adjusted for DST) + wantEnd: time.Date(2023, 3, 12, 15, 0, 0, 0, time.UTC), + }, + { + name: "boundary - DST fall back (2 AM occurs twice)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(2), // 2 AM (occurs twice) + Minute: intPtr(30), + }, + Timezone: "America/New_York", + }, + current: time.Date(2023, 11, 5, 15, 0, 0, 0, time.UTC), 
// Day DST ends + wantStart: time.Date(2023, 11, 5, 7, 30, 0, 0, time.UTC), // Same day 2:30 AM EST (after fall back) + wantEnd: time.Date(2023, 11, 5, 15, 0, 0, 0, time.UTC), + }, + { + name: "boundary - month transition January to February", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeMonthly, + Day: intPtr(31), + Hour: intPtr(0), + Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: time.Date(2023, 2, 15, 12, 0, 0, 0, time.UTC), // February 15th + wantStart: time.Date(2023, 1, 31, 0, 0, 0, 0, time.UTC), // January 31st (exists) + wantEnd: time.Date(2023, 2, 15, 12, 0, 0, 0, time.UTC), + }, + { + name: "boundary - extreme timezone offset (+14 hours)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(12), + Minute: intPtr(0), + }, + Timezone: "Pacific/Kiritimati", // UTC+14 + }, + current: time.Date(2023, 12, 15, 5, 0, 0, 0, time.UTC), // 7 PM local time + wantStart: time.Date(2023, 12, 14, 22, 0, 0, 0, time.UTC), // 12 PM local time (previous day in UTC) + wantEnd: time.Date(2023, 12, 15, 5, 0, 0, 0, time.UTC), + }, + { + name: "boundary - extreme timezone offset (-12 hours)", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeDaily, + Hour: intPtr(12), + Minute: intPtr(0), + }, + Timezone: "Etc/GMT+12", // UTC-12 (use standard timezone name) + }, + current: time.Date(2023, 12, 15, 5, 0, 0, 0, time.UTC), // 5 PM previous day local time + wantStart: time.Date(2023, 12, 15, 0, 0, 0, 0, time.UTC), // 12 PM local time (same day in UTC) + wantEnd: time.Date(2023, 12, 15, 5, 0, 0, 0, time.UTC), + }, + { + name: "boundary - week boundary Saturday to Sunday", + window: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeWeekly, + Weekday: intPtr(6), // Saturday + Hour: intPtr(0), + Minute: intPtr(0), + }, + Timezone: "UTC", + }, + current: time.Date(2023, 12, 17, 12, 0, 0, 0, time.UTC), // Sunday noon + wantStart: time.Date(2023, 
12, 16, 0, 0, 0, 0, time.UTC), // Saturday midnight + wantEnd: time.Date(2023, 12, 17, 12, 0, 0, 0, time.UTC), + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotStart, gotEnd := tt.window.NextWindowFor(tt.current) + + if !gotStart.Equal(tt.wantStart) { + t.Errorf("NextWindowFor() start = %v, want %v", gotStart, tt.wantStart) + } + if !gotEnd.Equal(tt.wantEnd) { + t.Errorf("NextWindowFor() end = %v, want %v", gotEnd, tt.wantEnd) + } + + // Validate basic invariants + if gotStart.After(gotEnd) { + t.Errorf("Window start should not be after end: start=%v, end=%v", gotStart, gotEnd) + } + if gotEnd.After(tt.current) { + t.Errorf("Window end should not be after current time: end=%v, current=%v", gotEnd, tt.current) + } + + duration := gotEnd.Sub(gotStart) + + // Validate window length is reasonable + if duration < 0 { + t.Errorf("Window duration should not be negative: %v", duration) + } + if duration > 366*24*time.Hour { + t.Errorf("Window duration should not exceed 1 year: %v", duration) + } + }) + } +} + +func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) { + tests := []struct { + name string + jsonInput string + wantKind EvaluationKind + wantSpec interface{} + wantError bool + }{ + { + name: "rolling evaluation with valid data", + jsonInput: `{"kind":"rolling","spec":{"evalWindow":"5m","frequency":"1m"}}`, + wantKind: RollingEvaluation, + wantSpec: RollingWindow{ + EvalWindow: Duration(5 * time.Minute), + Frequency: Duration(1 * time.Minute), + }, + }, + { + name: "cumulative evaluation with valid data", + jsonInput: `{"kind":"cumulative","spec":{"schedule":{"type":"hourly","minute":30},"frequency":"2m","timezone":"UTC"}}`, + wantKind: CumulativeEvaluation, + wantSpec: CumulativeWindow{ + Schedule: CumulativeSchedule{ + Type: ScheduleTypeHourly, + Minute: intPtr(30), + }, + Frequency: Duration(2 * time.Minute), + Timezone: "UTC", + }, + }, + { + name: "rolling evaluation with validation error - zero evalWindow", + jsonInput: 
`{"kind":"rolling","spec":{"evalWindow":"0s","frequency":"1m"}}`, + wantError: true, + }, + { + name: "rolling evaluation with validation error - zero frequency", + jsonInput: `{"kind":"rolling","spec":{"evalWindow":"5m","frequency":"0s"}}`, + wantError: true, + }, + { + name: "cumulative evaluation with validation error - zero frequency", + jsonInput: `{"kind":"cumulative","spec":{"schedule":{"type":"hourly","minute":30},"frequency":"0s","timezone":"UTC"}}`, + wantError: true, + }, + { + name: "cumulative evaluation with validation error - invalid timezone", + jsonInput: `{"kind":"cumulative","spec":{"schedule":{"type":"daily","hour":9,"minute":30},"frequency":"1m","timezone":"Invalid/Timezone"}}`, + wantError: true, + }, + { + name: "cumulative evaluation with validation error - missing minute for hourly", + jsonInput: `{"kind":"cumulative","spec":{"schedule":{"type":"hourly"},"frequency":"1m","timezone":"UTC"}}`, + wantError: true, + }, + { + name: "unknown evaluation kind", + jsonInput: `{"kind":"unknown","spec":{"evalWindow":"5m","frequency":"1h"}}`, + wantError: true, + }, + { + name: "invalid JSON", + jsonInput: `{"kind":"rolling","spec":invalid}`, + wantError: true, + }, + { + name: "missing kind field", + jsonInput: `{"spec":{"evalWindow":"5m","frequency":"1m"}}`, + wantError: true, + }, + { + name: "missing spec field", + jsonInput: `{"kind":"rolling"}`, + wantError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var envelope EvaluationEnvelope + err := json.Unmarshal([]byte(tt.jsonInput), &envelope) + + if tt.wantError { + if err == nil { + t.Errorf("EvaluationEnvelope.UnmarshalJSON() expected error, got none") + } + return + } + + if err != nil { + t.Fatalf("EvaluationEnvelope.UnmarshalJSON() unexpected error = %v", err) + } + + if envelope.Kind != tt.wantKind { + t.Errorf("EvaluationEnvelope.Kind = %v, want %v", envelope.Kind, tt.wantKind) + } + + // Check spec content based on type + switch tt.wantKind { + case 
RollingEvaluation: + gotSpec, ok := envelope.Spec.(RollingWindow) + if !ok { + t.Fatalf("Expected RollingWindow spec, got %T", envelope.Spec) + } + wantSpec := tt.wantSpec.(RollingWindow) + if gotSpec.EvalWindow != wantSpec.EvalWindow { + t.Errorf("RollingWindow.EvalWindow = %v, want %v", gotSpec.EvalWindow, wantSpec.EvalWindow) + } + if gotSpec.Frequency != wantSpec.Frequency { + t.Errorf("RollingWindow.Frequency = %v, want %v", gotSpec.Frequency, wantSpec.Frequency) + } + case CumulativeEvaluation: + gotSpec, ok := envelope.Spec.(CumulativeWindow) + if !ok { + t.Fatalf("Expected CumulativeWindow spec, got %T", envelope.Spec) + } + wantSpec := tt.wantSpec.(CumulativeWindow) + if gotSpec.Schedule.Type != wantSpec.Schedule.Type { + t.Errorf("CumulativeWindow.Schedule.Type = %v, want %v", gotSpec.Schedule.Type, wantSpec.Schedule.Type) + } + if (gotSpec.Schedule.Minute == nil) != (wantSpec.Schedule.Minute == nil) || + (gotSpec.Schedule.Minute != nil && wantSpec.Schedule.Minute != nil && *gotSpec.Schedule.Minute != *wantSpec.Schedule.Minute) { + t.Errorf("CumulativeWindow.Schedule.Minute = %v, want %v", gotSpec.Schedule.Minute, wantSpec.Schedule.Minute) + } + if gotSpec.Frequency != wantSpec.Frequency { + t.Errorf("CumulativeWindow.Frequency = %v, want %v", gotSpec.Frequency, wantSpec.Frequency) + } + if gotSpec.Timezone != wantSpec.Timezone { + t.Errorf("CumulativeWindow.Timezone = %v, want %v", gotSpec.Timezone, wantSpec.Timezone) + } + } + }) + } +} From d075ceecba1fb61be5174e8fd432b6dc9f9aceec Mon Sep 17 00:00:00 2001 From: manika-signoz Date: Mon, 15 Sep 2025 18:54:37 +0530 Subject: [PATCH 25/51] chore: copy changes and minor fixes, onboarding hint (#9095) --- .../AboutSigNozQuestions/AboutSigNozQuestions.tsx | 2 +- .../OnboardingQuestionaire/OrgQuestions/OrgQuestions.tsx | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx 
b/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx index 5c28f3a2f9c1..2124e995150f 100644 --- a/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx +++ b/frontend/src/container/OnboardingQuestionaire/AboutSigNozQuestions/AboutSigNozQuestions.tsx @@ -98,7 +98,7 @@ export function AboutSigNozQuestions({