mirror of
https://github.com/SigNoz/signoz.git
synced 2025-12-17 23:47:12 +00:00
feat: new query builder - base setup, parser, api and util files
This commit is contained in:
parent
ba2ed3ad22
commit
437c0c9479
@ -1,4 +1,5 @@
|
||||
module.exports = {
|
||||
ignorePatterns: ['src/parser/*.ts'],
|
||||
env: {
|
||||
browser: true,
|
||||
es2021: true,
|
||||
|
||||
@ -28,6 +28,8 @@
|
||||
"dependencies": {
|
||||
"@ant-design/colors": "6.0.0",
|
||||
"@ant-design/icons": "4.8.0",
|
||||
"@codemirror/autocomplete": "6.18.6",
|
||||
"@codemirror/lang-javascript": "6.2.3",
|
||||
"@dnd-kit/core": "6.1.0",
|
||||
"@dnd-kit/modifiers": "7.0.0",
|
||||
"@dnd-kit/sortable": "8.0.0",
|
||||
@ -43,6 +45,8 @@
|
||||
"@signozhq/design-tokens": "1.1.4",
|
||||
"@tanstack/react-table": "8.20.6",
|
||||
"@tanstack/react-virtual": "3.11.2",
|
||||
"@uiw/codemirror-theme-copilot": "4.23.11",
|
||||
"@uiw/react-codemirror": "4.23.10",
|
||||
"@uiw/react-md-editor": "3.23.5",
|
||||
"@visx/group": "3.3.0",
|
||||
"@visx/hierarchy": "3.12.0",
|
||||
@ -53,6 +57,7 @@
|
||||
"antd": "5.11.0",
|
||||
"antd-table-saveas-excel": "2.2.1",
|
||||
"axios": "1.8.2",
|
||||
"antlr4": "4.13.2",
|
||||
"babel-eslint": "^10.1.0",
|
||||
"babel-jest": "^29.6.4",
|
||||
"babel-loader": "9.1.3",
|
||||
|
||||
@ -3,6 +3,7 @@ const apiV1 = '/api/v1/';
|
||||
export const apiV2 = '/api/v2/';
|
||||
export const apiV3 = '/api/v3/';
|
||||
export const apiV4 = '/api/v4/';
|
||||
export const apiV5 = '/api/v5/';
|
||||
export const gatewayApiV1 = '/api/gateway/v1/';
|
||||
export const gatewayApiV2 = '/api/gateway/v2/';
|
||||
export const apiAlertManager = '/api/alertmanager/';
|
||||
|
||||
@ -19,6 +19,7 @@ import apiV1, {
|
||||
apiV2,
|
||||
apiV3,
|
||||
apiV4,
|
||||
apiV5,
|
||||
gatewayApiV1,
|
||||
gatewayApiV2,
|
||||
} from './apiV1';
|
||||
@ -171,6 +172,18 @@ ApiV4Instance.interceptors.response.use(
|
||||
ApiV4Instance.interceptors.request.use(interceptorsRequestResponse);
|
||||
//
|
||||
|
||||
// axios V5
// Axios instance bound to the /api/v5/ base path. It attaches the same
// shared request/response interceptors used by the other versioned
// instances in this module.
export const ApiV5Instance = axios.create({
	baseURL: `${ENVIRONMENT.baseURL}${apiV5}`,
});

ApiV5Instance.interceptors.response.use(
	interceptorsResponse,
	interceptorRejected,
);
ApiV5Instance.interceptors.request.use(interceptorsRequestResponse);
||||
//
|
||||
|
||||
// axios Base
|
||||
export const ApiBaseInstance = axios.create({
|
||||
baseURL: `${ENVIRONMENT.baseURL}${apiV1}`,
|
||||
|
||||
22
frontend/src/api/querySuggestions/getKeySuggestions.ts
Normal file
22
frontend/src/api/querySuggestions/getKeySuggestions.ts
Normal file
@ -0,0 +1,22 @@
|
||||
import axios from 'api';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import {
|
||||
QueryKeyRequestProps,
|
||||
QueryKeySuggestionsResponseProps,
|
||||
} from 'types/api/querySuggestions/types';
|
||||
|
||||
export const getKeySuggestions = (
|
||||
props: QueryKeyRequestProps,
|
||||
): Promise<AxiosResponse<QueryKeySuggestionsResponseProps>> => {
|
||||
const {
|
||||
signal = '',
|
||||
searchText = '',
|
||||
metricName = '',
|
||||
fieldContext = '',
|
||||
fieldDataType = '',
|
||||
} = props;
|
||||
|
||||
return axios.get(
|
||||
`/fields/keys?signal=${signal}&searchText=${searchText}&metricName=${metricName}&fieldContext=${fieldContext}&fieldDataType=${fieldDataType}`,
|
||||
);
|
||||
};
|
||||
20
frontend/src/api/querySuggestions/getValueSuggestion.ts
Normal file
20
frontend/src/api/querySuggestions/getValueSuggestion.ts
Normal file
@ -0,0 +1,20 @@
|
||||
import axios from 'api';
|
||||
import { AxiosResponse } from 'axios';
|
||||
import {
|
||||
QueryKeyValueRequestProps,
|
||||
QueryKeyValueSuggestionsResponseProps,
|
||||
} from 'types/api/querySuggestions/types';
|
||||
|
||||
export const getValueSuggestions = (
|
||||
props: QueryKeyValueRequestProps,
|
||||
): Promise<AxiosResponse<QueryKeyValueSuggestionsResponseProps>> => {
|
||||
const { signal, key, searchText } = props;
|
||||
|
||||
const encodedSignal = encodeURIComponent(signal);
|
||||
const encodedKey = encodeURIComponent(key);
|
||||
const encodedSearchText = encodeURIComponent(searchText);
|
||||
|
||||
return axios.get(
|
||||
`/fields/values?signal=${encodedSignal}&name=${encodedKey}&searchText=${encodedSearchText}`,
|
||||
);
|
||||
};
|
||||
168
frontend/src/api/v5/queryRange/constants.ts
Normal file
168
frontend/src/api/v5/queryRange/constants.ts
Normal file
@ -0,0 +1,168 @@
|
||||
// V5 Query Range Constants
|
||||
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import {
|
||||
FunctionName,
|
||||
RequestType,
|
||||
SignalType,
|
||||
Step,
|
||||
} from 'types/api/v5/queryRange';
|
||||
|
||||
// ===================== Schema and Version Constants =====================

// Version identifiers used when tagging V5 query-range requests.
export const SCHEMA_VERSION_V5 = ENTITY_VERSION_V5;
export const API_VERSION_V5 = 'v5';

// ===================== Default Values =====================

// Defaults applied when a builder query does not specify them.
export const DEFAULT_STEP_INTERVAL: Step = '60s';
export const DEFAULT_LIMIT = 100;
export const DEFAULT_OFFSET = 0;

// ===================== Request Type Constants =====================

// Wire values for the V5 `requestType` discriminator.
export const REQUEST_TYPES: Record<string, RequestType> = {
	SCALAR: 'scalar',
	TIME_SERIES: 'time_series',
	RAW: 'raw',
	DISTRIBUTION: 'distribution',
} as const;

// ===================== Signal Type Constants =====================

// Wire values for the V5 `signal` discriminator.
export const SIGNAL_TYPES: Record<string, SignalType> = {
	TRACES: 'traces',
	LOGS: 'logs',
	METRICS: 'metrics',
} as const;

// ===================== Common Aggregation Expressions =====================

// Ready-made aggregation expressions for trace queries.
export const TRACE_AGGREGATIONS = {
	COUNT: 'count()',
	COUNT_DISTINCT_TRACE_ID: 'count_distinct(traceID)',
	AVG_DURATION: 'avg(duration_nano)',
	P50_DURATION: 'p50(duration_nano)',
	P95_DURATION: 'p95(duration_nano)',
	P99_DURATION: 'p99(duration_nano)',
	MAX_DURATION: 'max(duration_nano)',
	MIN_DURATION: 'min(duration_nano)',
	SUM_DURATION: 'sum(duration_nano)',
} as const;

// Ready-made aggregation expressions for log queries.
export const LOG_AGGREGATIONS = {
	COUNT: 'count()',
	COUNT_DISTINCT_HOST: 'count_distinct(host.name)',
	COUNT_DISTINCT_SERVICE: 'count_distinct(service.name)',
	COUNT_DISTINCT_CONTAINER: 'count_distinct(container.name)',
} as const;

// ===================== Common Filter Expressions =====================

// Ready-made filter expressions in the builder's filter language.
export const COMMON_FILTERS = {
	// Trace filters
	SERVER_SPANS: "kind_string = 'Server'",
	CLIENT_SPANS: "kind_string = 'Client'",
	INTERNAL_SPANS: "kind_string = 'Internal'",
	ERROR_SPANS: 'http.status_code >= 400',
	SUCCESS_SPANS: 'http.status_code < 400',

	// Common service filters
	EXCLUDE_HEALTH_CHECKS: "http.route != '/health' AND http.route != '/ping'",
	HTTP_REQUESTS: "http.method != ''",

	// Log filters
	ERROR_LOGS: "severity_text = 'ERROR'",
	WARN_LOGS: "severity_text = 'WARN'",
	INFO_LOGS: "severity_text = 'INFO'",
	DEBUG_LOGS: "severity_text = 'DEBUG'",
} as const;

// ===================== Common Group By Fields =====================

// Frequently used group-by keys with their field context/data type.
export const COMMON_GROUP_BY_FIELDS = {
	SERVICE_NAME: {
		name: 'service.name',
		fieldDataType: 'string' as const,
		fieldContext: 'resource' as const,
	},
	HTTP_METHOD: {
		name: 'http.method',
		fieldDataType: 'string' as const,
		fieldContext: 'attribute' as const,
	},
	HTTP_ROUTE: {
		name: 'http.route',
		fieldDataType: 'string' as const,
		fieldContext: 'attribute' as const,
	},
	HTTP_STATUS_CODE: {
		name: 'http.status_code',
		fieldDataType: 'int64' as const,
		fieldContext: 'attribute' as const,
	},
	HOST_NAME: {
		name: 'host.name',
		fieldDataType: 'string' as const,
		fieldContext: 'resource' as const,
	},
	CONTAINER_NAME: {
		name: 'container.name',
		fieldDataType: 'string' as const,
		fieldContext: 'resource' as const,
	},
} as const;

// ===================== Function Names =====================

// Post-processing function identifiers accepted by the V5 API.
export const FUNCTION_NAMES: Record<string, FunctionName> = {
	CUT_OFF_MIN: 'cutOffMin',
	CUT_OFF_MAX: 'cutOffMax',
	CLAMP_MIN: 'clampMin',
	CLAMP_MAX: 'clampMax',
	ABSOLUTE: 'absolute',
	RUNNING_DIFF: 'runningDiff',
	LOG2: 'log2',
	LOG10: 'log10',
	CUM_SUM: 'cumSum',
	EWMA3: 'ewma3',
	EWMA5: 'ewma5',
	EWMA7: 'ewma7',
	MEDIAN3: 'median3',
	MEDIAN5: 'median5',
	MEDIAN7: 'median7',
	TIME_SHIFT: 'timeShift',
	ANOMALY: 'anomaly',
} as const;

// ===================== Common Step Intervals =====================

// Step intervals expressed as seconds-suffixed strings.
export const STEP_INTERVALS = {
	FIFTEEN_SECONDS: '15s',
	THIRTY_SECONDS: '30s',
	ONE_MINUTE: '60s',
	FIVE_MINUTES: '300s',
	TEN_MINUTES: '600s',
	FIFTEEN_MINUTES: '900s',
	THIRTY_MINUTES: '1800s',
	ONE_HOUR: '3600s',
	TWO_HOURS: '7200s',
	SIX_HOURS: '21600s',
	TWELVE_HOURS: '43200s',
	ONE_DAY: '86400s',
} as const;

// ===================== Time Range Presets =====================

// Relative time-range presets in milliseconds.
export const TIME_RANGE_PRESETS = {
	LAST_5_MINUTES: 5 * 60 * 1000,
	LAST_15_MINUTES: 15 * 60 * 1000,
	LAST_30_MINUTES: 30 * 60 * 1000,
	LAST_HOUR: 60 * 60 * 1000,
	LAST_3_HOURS: 3 * 60 * 60 * 1000,
	LAST_6_HOURS: 6 * 60 * 60 * 1000,
	LAST_12_HOURS: 12 * 60 * 60 * 1000,
	LAST_24_HOURS: 24 * 60 * 60 * 1000,
	LAST_3_DAYS: 3 * 24 * 60 * 60 * 1000,
	LAST_7_DAYS: 7 * 24 * 60 * 60 * 1000,
} as const;
||||
367
frontend/src/api/v5/queryRange/convertV5Response.ts
Normal file
367
frontend/src/api/v5/queryRange/convertV5Response.ts
Normal file
@ -0,0 +1,367 @@
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadV3 } from 'types/api/metrics/getQueryRange';
|
||||
import {
|
||||
DistributionData,
|
||||
MetricRangePayloadV5,
|
||||
QueryRangeRequestV5,
|
||||
RawData,
|
||||
ScalarData,
|
||||
TimeSeriesData,
|
||||
} from 'types/api/v5/queryRange';
|
||||
import { QueryDataV3 } from 'types/api/widgets/getQuery';
|
||||
|
||||
function getColName(
|
||||
col: ScalarData['columns'][number],
|
||||
legendMap: Record<string, string>,
|
||||
aggregationPerQuery: Record<string, any>,
|
||||
): string {
|
||||
const aggregation =
|
||||
aggregationPerQuery?.[col.queryName]?.[col.aggregationIndex];
|
||||
const legend = legendMap[col.queryName];
|
||||
const aggregationName = aggregation?.alias || aggregation?.expression || '';
|
||||
|
||||
if (col.columnType === 'group') {
|
||||
return col.name;
|
||||
}
|
||||
|
||||
if (aggregationName && aggregationPerQuery[col.queryName].length > 1) {
|
||||
if (legend) {
|
||||
return `${aggregationName}-${legend}`;
|
||||
}
|
||||
return `${col.queryName}.${aggregationName}`;
|
||||
}
|
||||
return legend || col.queryName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts V5 TimeSeriesData to legacy format
|
||||
*/
|
||||
function convertTimeSeriesData(
|
||||
timeSeriesData: TimeSeriesData,
|
||||
legendMap: Record<string, string>,
|
||||
): QueryDataV3 {
|
||||
// Convert V5 time series format to legacy QueryDataV3 format
|
||||
|
||||
return {
|
||||
queryName: timeSeriesData.queryName,
|
||||
legend: legendMap[timeSeriesData.queryName] || timeSeriesData.queryName,
|
||||
series: timeSeriesData?.aggregations?.flatMap((aggregation) => {
|
||||
const { index, alias, series } = aggregation;
|
||||
|
||||
if (!series || !series.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return series.map((series) => ({
|
||||
labels: series.labels
|
||||
? Object.fromEntries(
|
||||
series.labels.map((label) => [label.key.name, label.value]),
|
||||
)
|
||||
: {},
|
||||
labelsArray: series.labels
|
||||
? series.labels.map((label) => ({ [label.key.name]: label.value }))
|
||||
: [],
|
||||
values: series.values.map((value) => ({
|
||||
timestamp: value.timestamp,
|
||||
value: String(value.value),
|
||||
})),
|
||||
metaData: {
|
||||
alias,
|
||||
index,
|
||||
queryName: timeSeriesData.queryName,
|
||||
},
|
||||
}));
|
||||
}),
|
||||
list: null,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts V5 ScalarData array to legacy format with table structure
|
||||
*/
|
||||
function convertScalarDataArrayToTable(
|
||||
scalarDataArray: ScalarData[],
|
||||
legendMap: Record<string, string>,
|
||||
aggregationPerQuery: Record<string, any>,
|
||||
): QueryDataV3[] {
|
||||
// If no scalar data, return empty structure
|
||||
|
||||
if (!scalarDataArray || scalarDataArray.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
// Process each scalar data separately to maintain query separation
|
||||
return scalarDataArray?.map((scalarData) => {
|
||||
// Get query name from the first column
|
||||
const queryName = scalarData?.columns?.[0]?.queryName || '';
|
||||
|
||||
if ((scalarData as any)?.aggregations?.length > 0) {
|
||||
return {
|
||||
...convertTimeSeriesData(scalarData as any, legendMap),
|
||||
table: {
|
||||
columns: [],
|
||||
rows: [],
|
||||
},
|
||||
list: null,
|
||||
};
|
||||
}
|
||||
|
||||
// Collect columns for this specific query
|
||||
const columns = scalarData?.columns?.map((col) => ({
|
||||
name: getColName(col, legendMap, aggregationPerQuery),
|
||||
queryName: col.queryName,
|
||||
isValueColumn: col.columnType === 'aggregation',
|
||||
}));
|
||||
|
||||
// Process rows for this specific query
|
||||
const rows = scalarData?.data?.map((dataRow) => {
|
||||
const rowData: Record<string, any> = {};
|
||||
|
||||
scalarData?.columns?.forEach((col, colIndex) => {
|
||||
const columnName = getColName(col, legendMap, aggregationPerQuery);
|
||||
rowData[columnName] = dataRow[colIndex];
|
||||
});
|
||||
|
||||
return { data: rowData };
|
||||
});
|
||||
|
||||
return {
|
||||
queryName,
|
||||
legend: legendMap[queryName] || '',
|
||||
series: null,
|
||||
list: null,
|
||||
table: {
|
||||
columns,
|
||||
rows,
|
||||
},
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
function convertScalerWithFormatForWeb(
|
||||
scalarDataArray: ScalarData[],
|
||||
legendMap: Record<string, string>,
|
||||
aggregationPerQuery: Record<string, any>,
|
||||
): QueryDataV3[] {
|
||||
if (!scalarDataArray || scalarDataArray.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return scalarDataArray.map((scalarData) => {
|
||||
const columns =
|
||||
scalarData.columns?.map((col) => {
|
||||
const colName = getColName(col, legendMap, aggregationPerQuery);
|
||||
|
||||
return {
|
||||
name: colName,
|
||||
queryName: col.queryName,
|
||||
isValueColumn: col.columnType === 'aggregation',
|
||||
};
|
||||
}) || [];
|
||||
|
||||
const rows =
|
||||
scalarData.data?.map((dataRow) => {
|
||||
const rowData: Record<string, any> = {};
|
||||
columns?.forEach((col, colIndex) => {
|
||||
rowData[col.name] = dataRow[colIndex];
|
||||
});
|
||||
return { data: rowData };
|
||||
}) || [];
|
||||
|
||||
const queryName = scalarData.columns?.[0]?.queryName || '';
|
||||
|
||||
return {
|
||||
queryName,
|
||||
legend: legendMap[queryName] || queryName,
|
||||
series: null,
|
||||
list: null,
|
||||
table: {
|
||||
columns,
|
||||
rows,
|
||||
},
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts V5 RawData to legacy format
|
||||
*/
|
||||
function convertRawData(
|
||||
rawData: RawData,
|
||||
legendMap: Record<string, string>,
|
||||
): QueryDataV3 {
|
||||
// Convert V5 raw format to legacy QueryDataV3 format
|
||||
return {
|
||||
queryName: rawData.queryName,
|
||||
legend: legendMap[rawData.queryName] || rawData.queryName,
|
||||
series: null,
|
||||
list: rawData.rows?.map((row) => ({
|
||||
timestamp: row.timestamp,
|
||||
data: {
|
||||
// Map raw data to ILog structure - spread row.data first to include all properties
|
||||
...row.data,
|
||||
date: row.timestamp,
|
||||
} as any,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts V5 DistributionData to legacy format
|
||||
*/
|
||||
function convertDistributionData(
|
||||
distributionData: DistributionData,
|
||||
legendMap: Record<string, string>,
|
||||
): any {
|
||||
// eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
// Convert V5 distribution format to legacy histogram format
|
||||
return {
|
||||
...distributionData,
|
||||
legendMap,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Dispatches a V5 payload to the matching converter based on its `type`
 * discriminator and returns data in the legacy MetricRangePayloadV3 shape.
 * Unknown or missing types yield an empty result set.
 */
function convertV5DataByType(
	v5Data: any,
	legendMap: Record<string, string>,
	aggregationPerQuery: Record<string, any>,
): MetricRangePayloadV3['data'] {
	switch (v5Data?.type) {
		case 'time_series': {
			const timeSeriesData = v5Data.data.results as TimeSeriesData[];
			return {
				resultType: 'time_series',
				result: timeSeriesData.map((timeSeries) =>
					convertTimeSeriesData(timeSeries, legendMap),
				),
			};
		}
		case 'scalar': {
			const scalarData = v5Data.data.results as ScalarData[];
			// For scalar data, combine all results into separate table entries.
			const combinedTables = convertScalarDataArrayToTable(
				scalarData,
				legendMap,
				aggregationPerQuery,
			);
			return {
				resultType: 'scalar',
				result: combinedTables,
			};
		}
		case 'raw': {
			const rawData = v5Data.data.results as RawData[];
			return {
				resultType: 'raw',
				result: rawData.map((raw) => convertRawData(raw, legendMap)),
			};
		}
		case 'distribution': {
			const distributionData = v5Data.data.results as DistributionData[];
			return {
				resultType: 'distribution',
				result: distributionData.map((distribution) =>
					convertDistributionData(distribution, legendMap),
				),
			};
		}
		default:
			// Unknown request type — return an empty legacy payload.
			return {
				resultType: '',
				result: [],
			};
	}
}
||||
|
||||
/**
 * Converts a V5 API response to the legacy format expected by frontend
 * components.
 *
 * Builds a queryName → aggregations lookup from the request's builder
 * queries (used for scalar column naming), special-cases the
 * formatForWeb + scalar combination, converts the payload by type, and
 * finally applies legend mapping to every result entry.
 */
// eslint-disable-next-line sonarjs/cognitive-complexity
export function convertV5ResponseToLegacy(
	v5Response: SuccessResponse<MetricRangePayloadV5>,
	legendMap: Record<string, string>,
	formatForWeb?: boolean,
): SuccessResponse<MetricRangePayloadV3> {
	const { payload, params } = v5Response;
	const v5Data = payload?.data;

	// queryName -> aggregations, taken from the request's builder queries.
	const aggregationPerQuery =
		(params as QueryRangeRequestV5)?.compositeQuery?.queries
			?.filter((query) => query.type === 'builder_query')
			.reduce((acc, query) => {
				if (
					query.type === 'builder_query' &&
					'aggregations' in query.spec &&
					query.spec.name
				) {
					acc[query.spec.name] = query.spec.aggregations;
				}
				return acc;
			}, {} as Record<string, any>) || {};

	// If formatForWeb is true, scalar data keeps the web table structure.
	if (formatForWeb && v5Data?.type === 'scalar') {
		const scalarData = v5Data.data.results as ScalarData[];
		const webTables = convertScalerWithFormatForWeb(
			scalarData,
			legendMap,
			aggregationPerQuery,
		);
		return {
			...v5Response,
			payload: {
				data: {
					resultType: 'scalar',
					result: webTables,
				},
			},
		};
	}

	// Convert based on V5 response type.
	const convertedData = convertV5DataByType(
		v5Data,
		legendMap,
		aggregationPerQuery,
	);

	// Create legacy-compatible response structure.
	const legacyResponse: SuccessResponse<MetricRangePayloadV3> = {
		...v5Response,
		payload: {
			data: convertedData,
		},
	};

	// Apply legend mapping (mirrors the pre-V5 legacy logic).
	if (legacyResponse.payload?.data?.result) {
		legacyResponse.payload.data.result = legacyResponse.payload.data.result.map(
			(queryData: any) => {
				// eslint-disable-line @typescript-eslint/no-explicit-any
				const newQueryData = queryData;
				newQueryData.legend = legendMap[queryData.queryName];

				// If the metric names map is an empty object:
				if (isEmpty(queryData.metric)) {
					// No user-defined legend — fall back to the query name.
					if (newQueryData.legend === undefined || newQueryData.legend === null) {
						newQueryData.legend = queryData.queryName;
					}
					// When the query name and the legend match, mirror the name
					// into the metric map as well.
					if (queryData.queryName === newQueryData.legend) {
						newQueryData.metric = newQueryData.metric || {};
						newQueryData.metric[queryData.queryName] = queryData.queryName;
					}
				}

				return newQueryData;
			},
		);
	}

	return legacyResponse;
}
||||
45
frontend/src/api/v5/queryRange/getQueryRange.ts
Normal file
45
frontend/src/api/v5/queryRange/getQueryRange.ts
Normal file
@ -0,0 +1,45 @@
|
||||
import { ApiV5Instance } from 'api';
|
||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
||||
import { AxiosError } from 'axios';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
||||
import {
|
||||
MetricRangePayloadV5,
|
||||
QueryRangePayloadV5,
|
||||
} from 'types/api/v5/queryRange';
|
||||
|
||||
export const getQueryRangeV5 = async (
|
||||
props: QueryRangePayloadV5,
|
||||
version: string,
|
||||
signal: AbortSignal,
|
||||
headers?: Record<string, string>,
|
||||
): Promise<SuccessResponseV2<MetricRangePayloadV5>> => {
|
||||
try {
|
||||
if (version && version === ENTITY_VERSION_V5) {
|
||||
const response = await ApiV5Instance.post('/query_range', props, {
|
||||
signal,
|
||||
headers,
|
||||
});
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data,
|
||||
};
|
||||
}
|
||||
|
||||
// Default V5 behavior
|
||||
const response = await ApiV5Instance.post('/query_range', props, {
|
||||
signal,
|
||||
headers,
|
||||
});
|
||||
|
||||
return {
|
||||
httpStatusCode: response.status,
|
||||
data: response.data.data,
|
||||
};
|
||||
} catch (error) {
|
||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
||||
}
|
||||
};
|
||||
|
||||
export default getQueryRangeV5;
|
||||
408
frontend/src/api/v5/queryRange/prepareQueryRangePayloadV5.ts
Normal file
408
frontend/src/api/v5/queryRange/prepareQueryRangePayloadV5.ts
Normal file
@ -0,0 +1,408 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
|
||||
import { mapQueryDataToApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
IBuilderQuery,
|
||||
QueryFunctionProps,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import {
|
||||
BaseBuilderQuery,
|
||||
FieldContext,
|
||||
FieldDataType,
|
||||
FunctionName,
|
||||
GroupByKey,
|
||||
LogAggregation,
|
||||
MetricAggregation,
|
||||
OrderBy,
|
||||
QueryEnvelope,
|
||||
QueryFunction,
|
||||
QueryRangePayloadV5,
|
||||
QueryType,
|
||||
RequestType,
|
||||
TelemetryFieldKey,
|
||||
TraceAggregation,
|
||||
VariableItem,
|
||||
} from 'types/api/v5/queryRange';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
type PrepareQueryRangePayloadV5Result = {
|
||||
queryPayload: QueryRangePayloadV5;
|
||||
legendMap: Record<string, string>;
|
||||
};
|
||||
|
||||
/**
|
||||
* Maps panel types to V5 request types
|
||||
*/
|
||||
function mapPanelTypeToRequestType(panelType: PANEL_TYPES): RequestType {
|
||||
switch (panelType) {
|
||||
case PANEL_TYPES.TIME_SERIES:
|
||||
case PANEL_TYPES.BAR:
|
||||
return 'time_series';
|
||||
case PANEL_TYPES.TABLE:
|
||||
case PANEL_TYPES.PIE:
|
||||
case PANEL_TYPES.VALUE:
|
||||
case PANEL_TYPES.TRACE:
|
||||
return 'scalar';
|
||||
case PANEL_TYPES.LIST:
|
||||
return 'raw';
|
||||
case PANEL_TYPES.HISTOGRAM:
|
||||
return 'distribution';
|
||||
default:
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets signal type from data source
|
||||
*/
|
||||
function getSignalType(dataSource: string): 'traces' | 'logs' | 'metrics' {
|
||||
if (dataSource === 'traces') return 'traces';
|
||||
if (dataSource === 'logs') return 'logs';
|
||||
return 'metrics';
|
||||
}
|
||||
|
||||
/**
 * Creates the base spec shared by all V5 builder queries.
 *
 * Translates the legacy builder-query shape (step, filter, group-by,
 * limit/offset, ordering, having, functions, select fields) into the V5
 * BaseBuilderQuery shape. Empty/unset values map to `undefined` so they
 * are omitted from the serialized request.
 */
function createBaseSpec(
	queryData: IBuilderQuery,
	requestType: RequestType,
	panelType?: PANEL_TYPES,
): BaseBuilderQuery {
	// Columns may be legacy BaseAutocompleteData (keyed by `key`) or V5
	// TelemetryFieldKey (keyed by `name`); drop entries with neither.
	const nonEmptySelectColumns = (queryData.selectColumns as (
		| BaseAutocompleteData
		| TelemetryFieldKey
	)[])?.filter((c) => ('key' in c ? c?.key : c?.name));

	return {
		stepInterval: queryData?.stepInterval || undefined,
		disabled: queryData.disabled,
		// Only forward the filter when it actually has an expression.
		filter: queryData?.filter?.expression ? queryData.filter : undefined,
		groupBy:
			queryData.groupBy?.length > 0
				? queryData.groupBy.map(
						// Legacy group-by items carry key/dataType/type; map them onto
						// the V5 GroupByKey field names.
						(item: any): GroupByKey => ({
							name: item.key,
							fieldDataType: item?.dataType,
							fieldContext: item?.type,
							description: item?.description,
							unit: item?.unit,
							signal: item?.signal,
							materialized: item?.materialized,
						}),
				  )
				: undefined,
		// Table/list panels may page, so pageSize acts as a limit fallback.
		limit:
			panelType === PANEL_TYPES.TABLE || panelType === PANEL_TYPES.LIST
				? queryData.limit || queryData.pageSize || undefined
				: queryData.limit || undefined,
		// Offset only applies to raw (list) requests.
		offset: requestType === 'raw' ? queryData.offset : undefined,
		order:
			queryData.orderBy.length > 0
				? queryData.orderBy.map(
						// assumes orderBy entries carry columnName/order — legacy
						// builder shape; TODO confirm against IBuilderQuery.
						(order: any): OrderBy => ({
							key: {
								name: order.columnName,
							},
							direction: order.order,
						}),
				  )
				: undefined,
		// legend: isEmpty(queryData.legend) ? undefined : queryData.legend,
		having: isEmpty(queryData.havingExpression)
			? undefined
			: queryData?.havingExpression,
		functions: isEmpty(queryData.functions)
			? undefined
			: queryData.functions.map(
					(func: QueryFunctionProps): QueryFunction => ({
						name: func.name as FunctionName,
						args: func.args.map((arg) => ({
							// name: arg.name,
							value: arg,
						})),
					}),
			  ),
		selectFields: isEmpty(nonEmptySelectColumns)
			? undefined
			: nonEmptySelectColumns?.map(
					// Prefer the V5 field names, falling back to the legacy ones.
					(column: any): TelemetryFieldKey => ({
						name: column.name ?? column.key,
						fieldDataType:
							column?.fieldDataType ?? (column?.dataType as FieldDataType),
						fieldContext: column?.fieldContext ?? (column?.type as FieldContext),
						signal: column?.signal ?? undefined,
					}),
			  ),
	};
}
||||
// Utility to parse aggregation expressions with optional alias
|
||||
export function parseAggregations(
|
||||
expression: string,
|
||||
): { expression: string; alias?: string }[] {
|
||||
const result: { expression: string; alias?: string }[] = [];
|
||||
const regex = /([a-zA-Z0-9_]+\([^)]*\))(?:\s*as\s+([a-zA-Z0-9_]+))?/g;
|
||||
let match = regex.exec(expression);
|
||||
while (match !== null) {
|
||||
const expr = match[1];
|
||||
const alias = match[2];
|
||||
if (alias) {
|
||||
result.push({ expression: expr, alias });
|
||||
} else {
|
||||
result.push({ expression: expr });
|
||||
}
|
||||
match = regex.exec(expression);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function createAggregation(
|
||||
queryData: any,
|
||||
): TraceAggregation[] | LogAggregation[] | MetricAggregation[] {
|
||||
if (!queryData) {
|
||||
return [];
|
||||
}
|
||||
if (queryData.dataSource === DataSource.METRICS) {
|
||||
return [
|
||||
{
|
||||
metricName: queryData?.aggregateAttribute?.key,
|
||||
temporality: queryData?.aggregateAttribute?.temporality,
|
||||
timeAggregation: queryData?.timeAggregation,
|
||||
spaceAggregation: queryData?.spaceAggregation,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
if (queryData.aggregations?.length > 0) {
|
||||
return isEmpty(parseAggregations(queryData.aggregations?.[0].expression))
|
||||
? [{ expression: 'count()' }]
|
||||
: parseAggregations(queryData.aggregations?.[0].expression);
|
||||
}
|
||||
|
||||
return [{ expression: 'count()' }];
|
||||
}
|
||||
|
||||
/**
 * Converts legacy query-builder data to V5 builder-query envelopes.
 *
 * Each entry becomes a `{ type: 'builder_query', spec }` envelope whose
 * spec is specialised per signal (traces / logs / metrics) on top of the
 * shared base spec.
 */
function convertBuilderQueriesToV5(
	builderQueries: Record<string, any>, // eslint-disable-line @typescript-eslint/no-explicit-any
	requestType: RequestType,
	panelType?: PANEL_TYPES,
): QueryEnvelope[] {
	return Object.entries(builderQueries).map(
		([queryName, queryData]): QueryEnvelope => {
			const signal = getSignalType(queryData.dataSource);
			const baseSpec = createBaseSpec(queryData, requestType, panelType);
			let spec: QueryEnvelope['spec'];

			const aggregations = createAggregation(queryData);

			// The three branches differ only in the signal literal and the
			// aggregation cast; kept explicit so each union member type-checks.
			switch (signal) {
				case 'traces':
					spec = {
						name: queryName,
						signal: 'traces' as const,
						...baseSpec,
						aggregations: aggregations as TraceAggregation[],
					};
					break;
				case 'logs':
					spec = {
						name: queryName,
						signal: 'logs' as const,
						...baseSpec,
						aggregations: aggregations as LogAggregation[],
					};
					break;
				case 'metrics':
				default:
					spec = {
						name: queryName,
						signal: 'metrics' as const,
						...baseSpec,
						aggregations: aggregations as MetricAggregation[],
						// reduceTo: queryData.reduceTo,
					};
					break;
			}

			return {
				type: 'builder_query' as QueryType,
				spec,
			};
		},
	);
}
||||
|
||||
/**
|
||||
* Converts PromQL queries to V5 format
|
||||
*/
|
||||
function convertPromQueriesToV5(
|
||||
promQueries: Record<string, any>, // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
): QueryEnvelope[] {
|
||||
return Object.entries(promQueries).map(
|
||||
([queryName, queryData]): QueryEnvelope => ({
|
||||
type: 'promql' as QueryType,
|
||||
spec: {
|
||||
name: queryName,
|
||||
query: queryData.query,
|
||||
disabled: queryData.disabled || false,
|
||||
step: queryData?.stepInterval,
|
||||
stats: false, // PromQL specific field
|
||||
},
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts ClickHouse queries to V5 format
|
||||
*/
|
||||
function convertClickHouseQueriesToV5(
|
||||
chQueries: Record<string, any>, // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
): QueryEnvelope[] {
|
||||
return Object.entries(chQueries).map(
|
||||
([queryName, queryData]): QueryEnvelope => ({
|
||||
type: 'clickhouse_sql' as QueryType,
|
||||
spec: {
|
||||
name: queryName,
|
||||
query: queryData.query,
|
||||
disabled: queryData.disabled || false,
|
||||
// ClickHouse doesn't have step or stats like PromQL
|
||||
},
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts query formulas to V5 format
|
||||
*/
|
||||
function convertFormulasToV5(
|
||||
formulas: Record<string, any>, // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
): QueryEnvelope[] {
|
||||
return Object.entries(formulas).map(
|
||||
([queryName, formulaData]): QueryEnvelope => ({
|
||||
type: 'builder_formula' as QueryType,
|
||||
spec: {
|
||||
name: queryName,
|
||||
expression: formulaData.expression || '',
|
||||
functions: formulaData.functions,
|
||||
},
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to reduce query arrays to objects
|
||||
*/
|
||||
function reduceQueriesToObject(
|
||||
queryArray: any[], // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
): { queries: Record<string, any>; legends: Record<string, string> } {
|
||||
// eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
const legends: Record<string, string> = {};
|
||||
const queries = queryArray.reduce((acc, queryItem) => {
|
||||
if (!queryItem.query) return acc;
|
||||
acc[queryItem.name] = queryItem;
|
||||
legends[queryItem.name] = queryItem.legend;
|
||||
return acc;
|
||||
}, {} as Record<string, any>); // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
|
||||
return { queries, legends };
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepares V5 query range payload from GetQueryResultsProps
|
||||
*/
|
||||
export const prepareQueryRangePayloadV5 = ({
|
||||
query,
|
||||
globalSelectedInterval,
|
||||
graphType,
|
||||
selectedTime,
|
||||
tableParams,
|
||||
variables = {},
|
||||
start: startTime,
|
||||
end: endTime,
|
||||
formatForWeb,
|
||||
originalGraphType,
|
||||
}: GetQueryResultsProps): PrepareQueryRangePayloadV5Result => {
|
||||
let legendMap: Record<string, string> = {};
|
||||
const requestType = mapPanelTypeToRequestType(graphType);
|
||||
let queries: QueryEnvelope[] = [];
|
||||
|
||||
switch (query.queryType) {
|
||||
case EQueryType.QUERY_BUILDER: {
|
||||
const { queryData: data, queryFormulas } = query.builder;
|
||||
const currentQueryData = mapQueryDataToApi(data, 'queryName', tableParams);
|
||||
const currentFormulas = mapQueryDataToApi(queryFormulas, 'queryName');
|
||||
|
||||
// Combine legend maps
|
||||
legendMap = {
|
||||
...currentQueryData.newLegendMap,
|
||||
...currentFormulas.newLegendMap,
|
||||
};
|
||||
|
||||
// Convert builder queries
|
||||
const builderQueries = convertBuilderQueriesToV5(
|
||||
currentQueryData.data,
|
||||
requestType,
|
||||
graphType,
|
||||
);
|
||||
|
||||
// Convert formulas as separate query type
|
||||
const formulaQueries = convertFormulasToV5(currentFormulas.data);
|
||||
|
||||
// Combine both types
|
||||
queries = [...builderQueries, ...formulaQueries];
|
||||
break;
|
||||
}
|
||||
case EQueryType.PROM: {
|
||||
const promQueries = reduceQueriesToObject(query[query.queryType]);
|
||||
queries = convertPromQueriesToV5(promQueries.queries);
|
||||
legendMap = promQueries.legends;
|
||||
break;
|
||||
}
|
||||
case EQueryType.CLICKHOUSE: {
|
||||
const chQueries = reduceQueriesToObject(query[query.queryType]);
|
||||
queries = convertClickHouseQueriesToV5(chQueries.queries);
|
||||
legendMap = chQueries.legends;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
// Calculate time range
|
||||
const { start, end } = getStartEndRangeTime({
|
||||
type: selectedTime,
|
||||
interval: globalSelectedInterval,
|
||||
});
|
||||
|
||||
// Create V5 payload
|
||||
const queryPayload: QueryRangePayloadV5 = {
|
||||
schemaVersion: 'v1',
|
||||
start: startTime ? startTime * 1e3 : parseInt(start, 10) * 1e3,
|
||||
end: endTime ? endTime * 1e3 : parseInt(end, 10) * 1e3,
|
||||
requestType,
|
||||
compositeQuery: {
|
||||
queries,
|
||||
},
|
||||
formatOptions: {
|
||||
formatTableResultForUI:
|
||||
!!formatForWeb ||
|
||||
(originalGraphType
|
||||
? originalGraphType === PANEL_TYPES.TABLE
|
||||
: graphType === PANEL_TYPES.TABLE),
|
||||
},
|
||||
variables: Object.entries(variables).reduce((acc, [key, value]) => {
|
||||
acc[key] = { value };
|
||||
return acc;
|
||||
}, {} as Record<string, VariableItem>),
|
||||
};
|
||||
|
||||
return { legendMap, queryPayload };
|
||||
};
|
||||
8
frontend/src/api/v5/v5.ts
Normal file
8
frontend/src/api/v5/v5.ts
Normal file
@ -0,0 +1,8 @@
|
||||
// V5 API exports
|
||||
export * from './queryRange/constants';
|
||||
export { convertV5ResponseToLegacy } from './queryRange/convertV5Response';
|
||||
export { getQueryRangeV5 } from './queryRange/getQueryRange';
|
||||
export { prepareQueryRangePayloadV5 } from './queryRange/prepareQueryRangePayloadV5';
|
||||
|
||||
// Export types from proper location
|
||||
export * from 'types/api/v5/queryRange';
|
||||
78
frontend/src/constants/antlrQueryConstants.ts
Normal file
78
frontend/src/constants/antlrQueryConstants.ts
Normal file
@ -0,0 +1,78 @@
|
||||
export const OPERATORS = {
|
||||
IN: 'IN',
|
||||
LIKE: 'LIKE',
|
||||
ILIKE: 'ILIKE',
|
||||
REGEXP: 'REGEXP',
|
||||
EXISTS: 'EXISTS',
|
||||
CONTAINS: 'CONTAINS',
|
||||
BETWEEN: 'BETWEEN',
|
||||
NOT: 'NOT',
|
||||
'=': '=',
|
||||
'!=': '!=',
|
||||
'>=': '>=',
|
||||
'>': '>',
|
||||
'<=': '<=',
|
||||
'<': '<',
|
||||
};
|
||||
|
||||
export const NON_VALUE_OPERATORS = [OPERATORS.EXISTS];
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/naming-convention
|
||||
export enum QUERY_BUILDER_KEY_TYPES {
|
||||
STRING = 'string',
|
||||
NUMBER = 'number',
|
||||
BOOLEAN = 'boolean',
|
||||
}
|
||||
|
||||
export const QUERY_BUILDER_OPERATORS_BY_KEY_TYPE = {
|
||||
[QUERY_BUILDER_KEY_TYPES.STRING]: [
|
||||
OPERATORS['='],
|
||||
OPERATORS['!='],
|
||||
OPERATORS.IN,
|
||||
OPERATORS.LIKE,
|
||||
OPERATORS.ILIKE,
|
||||
OPERATORS.CONTAINS,
|
||||
OPERATORS.EXISTS,
|
||||
OPERATORS.REGEXP,
|
||||
OPERATORS.NOT,
|
||||
],
|
||||
[QUERY_BUILDER_KEY_TYPES.NUMBER]: [
|
||||
OPERATORS['='],
|
||||
OPERATORS['!='],
|
||||
OPERATORS['>='],
|
||||
OPERATORS['>'],
|
||||
OPERATORS['<='],
|
||||
OPERATORS['<'],
|
||||
OPERATORS.IN,
|
||||
OPERATORS.EXISTS,
|
||||
OPERATORS.BETWEEN,
|
||||
OPERATORS.NOT,
|
||||
],
|
||||
[QUERY_BUILDER_KEY_TYPES.BOOLEAN]: [
|
||||
OPERATORS['='],
|
||||
OPERATORS['!='],
|
||||
OPERATORS.EXISTS,
|
||||
OPERATORS.NOT,
|
||||
],
|
||||
};
|
||||
|
||||
export const negationQueryOperatorSuggestions = [
|
||||
{ label: OPERATORS.LIKE, type: 'operator', info: 'Like' },
|
||||
{ label: OPERATORS.ILIKE, type: 'operator', info: 'Case insensitive like' },
|
||||
{ label: OPERATORS.EXISTS, type: 'operator', info: 'Exists' },
|
||||
{ label: OPERATORS.BETWEEN, type: 'operator', info: 'Between' },
|
||||
{ label: OPERATORS.IN, type: 'operator', info: 'In' },
|
||||
{ label: OPERATORS.REGEXP, type: 'operator', info: 'Regular expression' },
|
||||
{ label: OPERATORS.CONTAINS, type: 'operator', info: 'Contains' },
|
||||
];
|
||||
|
||||
export const queryOperatorSuggestions = [
|
||||
{ label: OPERATORS['='], type: 'operator', info: 'Equal to' },
|
||||
{ label: OPERATORS['!='], type: 'operator', info: 'Not equal to' },
|
||||
{ label: OPERATORS['>'], type: 'operator', info: 'Greater than' },
|
||||
{ label: OPERATORS['<'], type: 'operator', info: 'Less than' },
|
||||
{ label: OPERATORS['>='], type: 'operator', info: 'Greater than or equal to' },
|
||||
{ label: OPERATORS['<='], type: 'operator', info: 'Less than or equal to' },
|
||||
{ label: OPERATORS.NOT, type: 'operator', info: 'Not' },
|
||||
...negationQueryOperatorSuggestions,
|
||||
];
|
||||
@ -0,0 +1,73 @@
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { QueryData, QueryDataV3 } from 'types/api/widgets/getQuery';
|
||||
|
||||
export function populateMultipleResults(
|
||||
responseData: SuccessResponse<MetricRangePayloadProps, unknown>,
|
||||
): SuccessResponse<MetricRangePayloadProps, unknown> {
|
||||
const queryResults = responseData?.payload?.data?.newResult?.data?.result;
|
||||
const allFormattedResults: QueryData[] = [];
|
||||
|
||||
queryResults?.forEach((query: QueryDataV3) => {
|
||||
const { queryName, legend, table } = query;
|
||||
if (!table) return;
|
||||
|
||||
const { columns, rows } = table;
|
||||
|
||||
const valueCol = columns?.find((c) => c.isValueColumn);
|
||||
const labelCols = columns?.filter((c) => !c.isValueColumn);
|
||||
|
||||
rows?.forEach((row) => {
|
||||
const metric: Record<string, string> = {};
|
||||
labelCols?.forEach((col) => {
|
||||
metric[col.name] = String(row.data[col.name]);
|
||||
});
|
||||
|
||||
allFormattedResults.push({
|
||||
metric,
|
||||
values: [[0, String(row.data[valueCol!.name])]],
|
||||
queryName,
|
||||
legend: legend || '',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
if (responseData?.payload?.data?.result?.length > 0) {
|
||||
const resultData = responseData.payload.data.result;
|
||||
resultData.forEach((item) => {
|
||||
// Ensure item.values exists and is an array
|
||||
const valuesArray = Array.isArray(item.values) ? item.values : [];
|
||||
// Filter out null/undefined values and sort by timestamp descending
|
||||
const nonNullValues = valuesArray
|
||||
.filter((v: any) => v && v[1] !== null && v[1] !== undefined)
|
||||
.sort((a: any, b: any) => b[0] - a[0]);
|
||||
// Pick the first (latest) value, or fallback to '0'
|
||||
const lastNonNullValue =
|
||||
nonNullValues.length > 0 ? nonNullValues[0][1] : '0';
|
||||
|
||||
allFormattedResults.push({
|
||||
metric: item.metric,
|
||||
values: [[0, String(lastNonNullValue)]],
|
||||
queryName: item.queryName,
|
||||
legend: item.legend || '',
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Create a copy instead of mutating the original
|
||||
const updatedResponseData: SuccessResponse<
|
||||
MetricRangePayloadProps,
|
||||
unknown
|
||||
> = {
|
||||
...responseData,
|
||||
payload: {
|
||||
...responseData.payload,
|
||||
data: {
|
||||
...responseData.payload.data,
|
||||
result: allFormattedResults,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
return updatedResponseData;
|
||||
}
|
||||
48
frontend/src/hooks/useGetQueryLabels.ts
Normal file
48
frontend/src/hooks/useGetQueryLabels.ts
Normal file
@ -0,0 +1,48 @@
|
||||
import { getQueryLabelWithAggregation } from 'components/QueryBuilderV2/utils';
|
||||
import { useMemo } from 'react';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
|
||||
export const useGetQueryLabels = (
|
||||
currentQuery: Query,
|
||||
): { label: string; value: string }[] => {
|
||||
const legendMap = useMemo(() => {
|
||||
const newLegendMap: Record<string, string> = {};
|
||||
if (currentQuery?.queryType === EQueryType.QUERY_BUILDER) {
|
||||
currentQuery?.builder?.queryData?.forEach((q) => {
|
||||
if (q.legend) {
|
||||
newLegendMap[q.queryName] = q.legend;
|
||||
}
|
||||
});
|
||||
currentQuery?.builder?.queryFormulas?.forEach((f) => {
|
||||
if (f.legend) {
|
||||
newLegendMap[f.queryName] = f.legend;
|
||||
}
|
||||
});
|
||||
}
|
||||
return newLegendMap;
|
||||
}, [currentQuery]);
|
||||
|
||||
return useMemo(() => {
|
||||
if (currentQuery?.queryType === EQueryType.QUERY_BUILDER) {
|
||||
const queryLabels = getQueryLabelWithAggregation(
|
||||
currentQuery?.builder?.queryData || [],
|
||||
legendMap,
|
||||
);
|
||||
const formulaLabels = currentQuery?.builder?.queryFormulas?.map(
|
||||
(formula) => ({
|
||||
label: formula.queryName,
|
||||
value: formula.queryName,
|
||||
}),
|
||||
);
|
||||
return [...queryLabels, ...formulaLabels];
|
||||
}
|
||||
if (currentQuery?.queryType === EQueryType.CLICKHOUSE) {
|
||||
return currentQuery?.clickhouse_sql?.map((q) => ({
|
||||
label: q.name,
|
||||
value: q.name,
|
||||
}));
|
||||
}
|
||||
return currentQuery?.promql?.map((q) => ({ label: q.name, value: q.name }));
|
||||
}, [currentQuery, legendMap]);
|
||||
};
|
||||
90
frontend/src/parser/FilterQuery.interp
Normal file
90
frontend/src/parser/FilterQuery.interp
Normal file
File diff suppressed because one or more lines are too long
42
frontend/src/parser/FilterQuery.tokens
Normal file
42
frontend/src/parser/FilterQuery.tokens
Normal file
@ -0,0 +1,42 @@
|
||||
LPAREN=1
|
||||
RPAREN=2
|
||||
LBRACK=3
|
||||
RBRACK=4
|
||||
COMMA=5
|
||||
EQUALS=6
|
||||
NOT_EQUALS=7
|
||||
NEQ=8
|
||||
LT=9
|
||||
LE=10
|
||||
GT=11
|
||||
GE=12
|
||||
LIKE=13
|
||||
ILIKE=14
|
||||
BETWEEN=15
|
||||
EXISTS=16
|
||||
REGEXP=17
|
||||
CONTAINS=18
|
||||
IN=19
|
||||
NOT=20
|
||||
AND=21
|
||||
OR=22
|
||||
HAS=23
|
||||
HASANY=24
|
||||
HASALL=25
|
||||
BOOL=26
|
||||
NUMBER=27
|
||||
QUOTED_TEXT=28
|
||||
KEY=29
|
||||
WS=30
|
||||
FREETEXT=31
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
']'=4
|
||||
','=5
|
||||
'!='=7
|
||||
'<>'=8
|
||||
'<'=9
|
||||
'<='=10
|
||||
'>'=11
|
||||
'>='=12
|
||||
115
frontend/src/parser/FilterQueryLexer.interp
Normal file
115
frontend/src/parser/FilterQueryLexer.interp
Normal file
File diff suppressed because one or more lines are too long
42
frontend/src/parser/FilterQueryLexer.tokens
Normal file
42
frontend/src/parser/FilterQueryLexer.tokens
Normal file
@ -0,0 +1,42 @@
|
||||
LPAREN=1
|
||||
RPAREN=2
|
||||
LBRACK=3
|
||||
RBRACK=4
|
||||
COMMA=5
|
||||
EQUALS=6
|
||||
NOT_EQUALS=7
|
||||
NEQ=8
|
||||
LT=9
|
||||
LE=10
|
||||
GT=11
|
||||
GE=12
|
||||
LIKE=13
|
||||
ILIKE=14
|
||||
BETWEEN=15
|
||||
EXISTS=16
|
||||
REGEXP=17
|
||||
CONTAINS=18
|
||||
IN=19
|
||||
NOT=20
|
||||
AND=21
|
||||
OR=22
|
||||
HAS=23
|
||||
HASANY=24
|
||||
HASALL=25
|
||||
BOOL=26
|
||||
NUMBER=27
|
||||
QUOTED_TEXT=28
|
||||
KEY=29
|
||||
WS=30
|
||||
FREETEXT=31
|
||||
'('=1
|
||||
')'=2
|
||||
'['=3
|
||||
']'=4
|
||||
','=5
|
||||
'!='=7
|
||||
'<>'=8
|
||||
'<'=9
|
||||
'<='=10
|
||||
'>'=11
|
||||
'>='=12
|
||||
220
frontend/src/parser/FilterQueryLexer.ts
Normal file
220
frontend/src/parser/FilterQueryLexer.ts
Normal file
@ -0,0 +1,220 @@
|
||||
// Generated from FilterQuery.g4 by ANTLR 4.13.1
|
||||
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
|
||||
import {
|
||||
ATN,
|
||||
ATNDeserializer,
|
||||
CharStream,
|
||||
DecisionState, DFA,
|
||||
Lexer,
|
||||
LexerATNSimulator,
|
||||
RuleContext,
|
||||
PredictionContextCache,
|
||||
Token
|
||||
} from "antlr4";
|
||||
export default class FilterQueryLexer extends Lexer {
|
||||
public static readonly LPAREN = 1;
|
||||
public static readonly RPAREN = 2;
|
||||
public static readonly LBRACK = 3;
|
||||
public static readonly RBRACK = 4;
|
||||
public static readonly COMMA = 5;
|
||||
public static readonly EQUALS = 6;
|
||||
public static readonly NOT_EQUALS = 7;
|
||||
public static readonly NEQ = 8;
|
||||
public static readonly LT = 9;
|
||||
public static readonly LE = 10;
|
||||
public static readonly GT = 11;
|
||||
public static readonly GE = 12;
|
||||
public static readonly LIKE = 13;
|
||||
public static readonly ILIKE = 14;
|
||||
public static readonly BETWEEN = 15;
|
||||
public static readonly EXISTS = 16;
|
||||
public static readonly REGEXP = 17;
|
||||
public static readonly CONTAINS = 18;
|
||||
public static readonly IN = 19;
|
||||
public static readonly NOT = 20;
|
||||
public static readonly AND = 21;
|
||||
public static readonly OR = 22;
|
||||
public static readonly HAS = 23;
|
||||
public static readonly HASANY = 24;
|
||||
public static readonly HASALL = 25;
|
||||
public static readonly BOOL = 26;
|
||||
public static readonly NUMBER = 27;
|
||||
public static readonly QUOTED_TEXT = 28;
|
||||
public static readonly KEY = 29;
|
||||
public static readonly WS = 30;
|
||||
public static readonly FREETEXT = 31;
|
||||
public static readonly EOF = Token.EOF;
|
||||
|
||||
public static readonly channelNames: string[] = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ];
|
||||
public static readonly literalNames: (string | null)[] = [ null, "'('",
|
||||
"')'", "'['",
|
||||
"']'", "','",
|
||||
null, "'!='",
|
||||
"'<>'", "'<'",
|
||||
"'<='", "'>'",
|
||||
"'>='" ];
|
||||
public static readonly symbolicNames: (string | null)[] = [ null, "LPAREN",
|
||||
"RPAREN", "LBRACK",
|
||||
"RBRACK", "COMMA",
|
||||
"EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT",
|
||||
"LE", "GT",
|
||||
"GE", "LIKE",
|
||||
"ILIKE", "BETWEEN",
|
||||
"EXISTS", "REGEXP",
|
||||
"CONTAINS",
|
||||
"IN", "NOT",
|
||||
"AND", "OR",
|
||||
"HAS", "HASANY",
|
||||
"HASALL", "BOOL",
|
||||
"NUMBER", "QUOTED_TEXT",
|
||||
"KEY", "WS",
|
||||
"FREETEXT" ];
|
||||
public static readonly modeNames: string[] = [ "DEFAULT_MODE", ];
|
||||
|
||||
public static readonly ruleNames: string[] = [
|
||||
"LPAREN", "RPAREN", "LBRACK", "RBRACK", "COMMA", "EQUALS", "NOT_EQUALS",
|
||||
"NEQ", "LT", "LE", "GT", "GE", "LIKE", "ILIKE", "BETWEEN", "EXISTS", "REGEXP",
|
||||
"CONTAINS", "IN", "NOT", "AND", "OR", "HAS", "HASANY", "HASALL", "BOOL",
|
||||
"SIGN", "NUMBER", "QUOTED_TEXT", "SEGMENT", "EMPTY_BRACKS", "OLD_JSON_BRACKS",
|
||||
"KEY", "WS", "DIGIT", "FREETEXT",
|
||||
];
|
||||
|
||||
|
||||
constructor(input: CharStream) {
|
||||
super(input);
|
||||
this._interp = new LexerATNSimulator(this, FilterQueryLexer._ATN, FilterQueryLexer.DecisionsToDFA, new PredictionContextCache());
|
||||
}
|
||||
|
||||
public get grammarFileName(): string { return "FilterQuery.g4"; }
|
||||
|
||||
public get literalNames(): (string | null)[] { return FilterQueryLexer.literalNames; }
|
||||
public get symbolicNames(): (string | null)[] { return FilterQueryLexer.symbolicNames; }
|
||||
public get ruleNames(): string[] { return FilterQueryLexer.ruleNames; }
|
||||
|
||||
public get serializedATN(): number[] { return FilterQueryLexer._serializedATN; }
|
||||
|
||||
public get channelNames(): string[] { return FilterQueryLexer.channelNames; }
|
||||
|
||||
public get modeNames(): string[] { return FilterQueryLexer.modeNames; }
|
||||
|
||||
public static readonly _serializedATN: number[] = [4,0,31,303,6,-1,2,0,
|
||||
7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2,7,7,7,2,8,7,8,2,9,
|
||||
7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13,2,14,7,14,2,15,7,15,2,16,7,
|
||||
16,2,17,7,17,2,18,7,18,2,19,7,19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,
|
||||
2,24,7,24,2,25,7,25,2,26,7,26,2,27,7,27,2,28,7,28,2,29,7,29,2,30,7,30,2,
|
||||
31,7,31,2,32,7,32,2,33,7,33,2,34,7,34,2,35,7,35,1,0,1,0,1,1,1,1,1,2,1,2,
|
||||
1,3,1,3,1,4,1,4,1,5,1,5,1,5,3,5,87,8,5,1,6,1,6,1,6,1,7,1,7,1,7,1,8,1,8,
|
||||
1,9,1,9,1,9,1,10,1,10,1,11,1,11,1,11,1,12,1,12,1,12,1,12,1,12,1,13,1,13,
|
||||
1,13,1,13,1,13,1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,15,1,15,1,
|
||||
15,1,15,1,15,1,15,3,15,130,8,15,1,16,1,16,1,16,1,16,1,16,1,16,1,16,1,17,
|
||||
1,17,1,17,1,17,1,17,1,17,1,17,1,17,3,17,147,8,17,1,18,1,18,1,18,1,19,1,
|
||||
19,1,19,1,19,1,20,1,20,1,20,1,20,1,21,1,21,1,21,1,22,1,22,1,22,1,22,1,23,
|
||||
1,23,1,23,1,23,1,23,1,23,1,23,1,24,1,24,1,24,1,24,1,24,1,24,1,24,1,25,1,
|
||||
25,1,25,1,25,1,25,1,25,1,25,1,25,1,25,3,25,190,8,25,1,26,1,26,1,27,3,27,
|
||||
195,8,27,1,27,4,27,198,8,27,11,27,12,27,199,1,27,1,27,5,27,204,8,27,10,
|
||||
27,12,27,207,9,27,3,27,209,8,27,1,27,1,27,3,27,213,8,27,1,27,4,27,216,8,
|
||||
27,11,27,12,27,217,3,27,220,8,27,1,27,3,27,223,8,27,1,27,1,27,4,27,227,
|
||||
8,27,11,27,12,27,228,1,27,1,27,3,27,233,8,27,1,27,4,27,236,8,27,11,27,12,
|
||||
27,237,3,27,240,8,27,3,27,242,8,27,1,28,1,28,1,28,1,28,5,28,248,8,28,10,
|
||||
28,12,28,251,9,28,1,28,1,28,1,28,1,28,1,28,5,28,258,8,28,10,28,12,28,261,
|
||||
9,28,1,28,3,28,264,8,28,1,29,1,29,5,29,268,8,29,10,29,12,29,271,9,29,1,
|
||||
30,1,30,1,30,1,31,1,31,1,31,1,31,1,32,1,32,1,32,1,32,1,32,5,32,285,8,32,
|
||||
10,32,12,32,288,9,32,1,33,4,33,291,8,33,11,33,12,33,292,1,33,1,33,1,34,
|
||||
1,34,1,35,4,35,300,8,35,11,35,12,35,301,0,0,36,1,1,3,2,5,3,7,4,9,5,11,6,
|
||||
13,7,15,8,17,9,19,10,21,11,23,12,25,13,27,14,29,15,31,16,33,17,35,18,37,
|
||||
19,39,20,41,21,43,22,45,23,47,24,49,25,51,26,53,0,55,27,57,28,59,0,61,0,
|
||||
63,0,65,29,67,30,69,0,71,31,1,0,29,2,0,76,76,108,108,2,0,73,73,105,105,
|
||||
2,0,75,75,107,107,2,0,69,69,101,101,2,0,66,66,98,98,2,0,84,84,116,116,2,
|
||||
0,87,87,119,119,2,0,78,78,110,110,2,0,88,88,120,120,2,0,83,83,115,115,2,
|
||||
0,82,82,114,114,2,0,71,71,103,103,2,0,80,80,112,112,2,0,67,67,99,99,2,0,
|
||||
79,79,111,111,2,0,65,65,97,97,2,0,68,68,100,100,2,0,72,72,104,104,2,0,89,
|
||||
89,121,121,2,0,85,85,117,117,2,0,70,70,102,102,2,0,43,43,45,45,2,0,34,34,
|
||||
92,92,2,0,39,39,92,92,3,0,36,36,65,90,97,122,6,0,36,36,45,45,48,58,65,90,
|
||||
95,95,97,122,3,0,9,10,13,13,32,32,1,0,48,57,8,0,9,10,13,13,32,34,39,41,
|
||||
44,44,60,62,91,91,93,93,325,0,1,1,0,0,0,0,3,1,0,0,0,0,5,1,0,0,0,0,7,1,0,
|
||||
0,0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0,0,0,19,
|
||||
1,0,0,0,0,21,1,0,0,0,0,23,1,0,0,0,0,25,1,0,0,0,0,27,1,0,0,0,0,29,1,0,0,
|
||||
0,0,31,1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,0,0,37,1,0,0,0,0,39,1,0,0,0,0,41,
|
||||
1,0,0,0,0,43,1,0,0,0,0,45,1,0,0,0,0,47,1,0,0,0,0,49,1,0,0,0,0,51,1,0,0,
|
||||
0,0,55,1,0,0,0,0,57,1,0,0,0,0,65,1,0,0,0,0,67,1,0,0,0,0,71,1,0,0,0,1,73,
|
||||
1,0,0,0,3,75,1,0,0,0,5,77,1,0,0,0,7,79,1,0,0,0,9,81,1,0,0,0,11,86,1,0,0,
|
||||
0,13,88,1,0,0,0,15,91,1,0,0,0,17,94,1,0,0,0,19,96,1,0,0,0,21,99,1,0,0,0,
|
||||
23,101,1,0,0,0,25,104,1,0,0,0,27,109,1,0,0,0,29,115,1,0,0,0,31,123,1,0,
|
||||
0,0,33,131,1,0,0,0,35,138,1,0,0,0,37,148,1,0,0,0,39,151,1,0,0,0,41,155,
|
||||
1,0,0,0,43,159,1,0,0,0,45,162,1,0,0,0,47,166,1,0,0,0,49,173,1,0,0,0,51,
|
||||
189,1,0,0,0,53,191,1,0,0,0,55,241,1,0,0,0,57,263,1,0,0,0,59,265,1,0,0,0,
|
||||
61,272,1,0,0,0,63,275,1,0,0,0,65,279,1,0,0,0,67,290,1,0,0,0,69,296,1,0,
|
||||
0,0,71,299,1,0,0,0,73,74,5,40,0,0,74,2,1,0,0,0,75,76,5,41,0,0,76,4,1,0,
|
||||
0,0,77,78,5,91,0,0,78,6,1,0,0,0,79,80,5,93,0,0,80,8,1,0,0,0,81,82,5,44,
|
||||
0,0,82,10,1,0,0,0,83,87,5,61,0,0,84,85,5,61,0,0,85,87,5,61,0,0,86,83,1,
|
||||
0,0,0,86,84,1,0,0,0,87,12,1,0,0,0,88,89,5,33,0,0,89,90,5,61,0,0,90,14,1,
|
||||
0,0,0,91,92,5,60,0,0,92,93,5,62,0,0,93,16,1,0,0,0,94,95,5,60,0,0,95,18,
|
||||
1,0,0,0,96,97,5,60,0,0,97,98,5,61,0,0,98,20,1,0,0,0,99,100,5,62,0,0,100,
|
||||
22,1,0,0,0,101,102,5,62,0,0,102,103,5,61,0,0,103,24,1,0,0,0,104,105,7,0,
|
||||
0,0,105,106,7,1,0,0,106,107,7,2,0,0,107,108,7,3,0,0,108,26,1,0,0,0,109,
|
||||
110,7,1,0,0,110,111,7,0,0,0,111,112,7,1,0,0,112,113,7,2,0,0,113,114,7,3,
|
||||
0,0,114,28,1,0,0,0,115,116,7,4,0,0,116,117,7,3,0,0,117,118,7,5,0,0,118,
|
||||
119,7,6,0,0,119,120,7,3,0,0,120,121,7,3,0,0,121,122,7,7,0,0,122,30,1,0,
|
||||
0,0,123,124,7,3,0,0,124,125,7,8,0,0,125,126,7,1,0,0,126,127,7,9,0,0,127,
|
||||
129,7,5,0,0,128,130,7,9,0,0,129,128,1,0,0,0,129,130,1,0,0,0,130,32,1,0,
|
||||
0,0,131,132,7,10,0,0,132,133,7,3,0,0,133,134,7,11,0,0,134,135,7,3,0,0,135,
|
||||
136,7,8,0,0,136,137,7,12,0,0,137,34,1,0,0,0,138,139,7,13,0,0,139,140,7,
|
||||
14,0,0,140,141,7,7,0,0,141,142,7,5,0,0,142,143,7,15,0,0,143,144,7,1,0,0,
|
||||
144,146,7,7,0,0,145,147,7,9,0,0,146,145,1,0,0,0,146,147,1,0,0,0,147,36,
|
||||
1,0,0,0,148,149,7,1,0,0,149,150,7,7,0,0,150,38,1,0,0,0,151,152,7,7,0,0,
|
||||
152,153,7,14,0,0,153,154,7,5,0,0,154,40,1,0,0,0,155,156,7,15,0,0,156,157,
|
||||
7,7,0,0,157,158,7,16,0,0,158,42,1,0,0,0,159,160,7,14,0,0,160,161,7,10,0,
|
||||
0,161,44,1,0,0,0,162,163,7,17,0,0,163,164,7,15,0,0,164,165,7,9,0,0,165,
|
||||
46,1,0,0,0,166,167,7,17,0,0,167,168,7,15,0,0,168,169,7,9,0,0,169,170,7,
|
||||
15,0,0,170,171,7,7,0,0,171,172,7,18,0,0,172,48,1,0,0,0,173,174,7,17,0,0,
|
||||
174,175,7,15,0,0,175,176,7,9,0,0,176,177,7,15,0,0,177,178,7,0,0,0,178,179,
|
||||
7,0,0,0,179,50,1,0,0,0,180,181,7,5,0,0,181,182,7,10,0,0,182,183,7,19,0,
|
||||
0,183,190,7,3,0,0,184,185,7,20,0,0,185,186,7,15,0,0,186,187,7,0,0,0,187,
|
||||
188,7,9,0,0,188,190,7,3,0,0,189,180,1,0,0,0,189,184,1,0,0,0,190,52,1,0,
|
||||
0,0,191,192,7,21,0,0,192,54,1,0,0,0,193,195,3,53,26,0,194,193,1,0,0,0,194,
|
||||
195,1,0,0,0,195,197,1,0,0,0,196,198,3,69,34,0,197,196,1,0,0,0,198,199,1,
|
||||
0,0,0,199,197,1,0,0,0,199,200,1,0,0,0,200,208,1,0,0,0,201,205,5,46,0,0,
|
||||
202,204,3,69,34,0,203,202,1,0,0,0,204,207,1,0,0,0,205,203,1,0,0,0,205,206,
|
||||
1,0,0,0,206,209,1,0,0,0,207,205,1,0,0,0,208,201,1,0,0,0,208,209,1,0,0,0,
|
||||
209,219,1,0,0,0,210,212,7,3,0,0,211,213,3,53,26,0,212,211,1,0,0,0,212,213,
|
||||
1,0,0,0,213,215,1,0,0,0,214,216,3,69,34,0,215,214,1,0,0,0,216,217,1,0,0,
|
||||
0,217,215,1,0,0,0,217,218,1,0,0,0,218,220,1,0,0,0,219,210,1,0,0,0,219,220,
|
||||
1,0,0,0,220,242,1,0,0,0,221,223,3,53,26,0,222,221,1,0,0,0,222,223,1,0,0,
|
||||
0,223,224,1,0,0,0,224,226,5,46,0,0,225,227,3,69,34,0,226,225,1,0,0,0,227,
|
||||
228,1,0,0,0,228,226,1,0,0,0,228,229,1,0,0,0,229,239,1,0,0,0,230,232,7,3,
|
||||
0,0,231,233,3,53,26,0,232,231,1,0,0,0,232,233,1,0,0,0,233,235,1,0,0,0,234,
|
||||
236,3,69,34,0,235,234,1,0,0,0,236,237,1,0,0,0,237,235,1,0,0,0,237,238,1,
|
||||
0,0,0,238,240,1,0,0,0,239,230,1,0,0,0,239,240,1,0,0,0,240,242,1,0,0,0,241,
|
||||
194,1,0,0,0,241,222,1,0,0,0,242,56,1,0,0,0,243,249,5,34,0,0,244,248,8,22,
|
||||
0,0,245,246,5,92,0,0,246,248,9,0,0,0,247,244,1,0,0,0,247,245,1,0,0,0,248,
|
||||
251,1,0,0,0,249,247,1,0,0,0,249,250,1,0,0,0,250,252,1,0,0,0,251,249,1,0,
|
||||
0,0,252,264,5,34,0,0,253,259,5,39,0,0,254,258,8,23,0,0,255,256,5,92,0,0,
|
||||
256,258,9,0,0,0,257,254,1,0,0,0,257,255,1,0,0,0,258,261,1,0,0,0,259,257,
|
||||
1,0,0,0,259,260,1,0,0,0,260,262,1,0,0,0,261,259,1,0,0,0,262,264,5,39,0,
|
||||
0,263,243,1,0,0,0,263,253,1,0,0,0,264,58,1,0,0,0,265,269,7,24,0,0,266,268,
|
||||
7,25,0,0,267,266,1,0,0,0,268,271,1,0,0,0,269,267,1,0,0,0,269,270,1,0,0,
|
||||
0,270,60,1,0,0,0,271,269,1,0,0,0,272,273,5,91,0,0,273,274,5,93,0,0,274,
|
||||
62,1,0,0,0,275,276,5,91,0,0,276,277,5,42,0,0,277,278,5,93,0,0,278,64,1,
|
||||
0,0,0,279,286,3,59,29,0,280,281,5,46,0,0,281,285,3,59,29,0,282,285,3,61,
|
||||
30,0,283,285,3,63,31,0,284,280,1,0,0,0,284,282,1,0,0,0,284,283,1,0,0,0,
|
||||
285,288,1,0,0,0,286,284,1,0,0,0,286,287,1,0,0,0,287,66,1,0,0,0,288,286,
|
||||
1,0,0,0,289,291,7,26,0,0,290,289,1,0,0,0,291,292,1,0,0,0,292,290,1,0,0,
|
||||
0,292,293,1,0,0,0,293,294,1,0,0,0,294,295,6,33,0,0,295,68,1,0,0,0,296,297,
|
||||
7,27,0,0,297,70,1,0,0,0,298,300,8,28,0,0,299,298,1,0,0,0,300,301,1,0,0,
|
||||
0,301,299,1,0,0,0,301,302,1,0,0,0,302,72,1,0,0,0,28,0,86,129,146,189,194,
|
||||
199,205,208,212,217,219,222,228,232,237,239,241,247,249,257,259,263,269,
|
||||
284,286,292,301,1,6,0,0];
|
||||
|
||||
private static __ATN: ATN;
|
||||
public static get _ATN(): ATN {
|
||||
if (!FilterQueryLexer.__ATN) {
|
||||
FilterQueryLexer.__ATN = new ATNDeserializer().deserialize(FilterQueryLexer._serializedATN);
|
||||
}
|
||||
|
||||
return FilterQueryLexer.__ATN;
|
||||
}
|
||||
|
||||
|
||||
static DecisionsToDFA = FilterQueryLexer._ATN.decisionToState.map( (ds: DecisionState, index: number) => new DFA(ds, index) );
|
||||
}
|
||||
201
frontend/src/parser/FilterQueryListener.ts
Normal file
201
frontend/src/parser/FilterQueryListener.ts
Normal file
@ -0,0 +1,201 @@
|
||||
// Generated from FilterQuery.g4 by ANTLR 4.13.1
|
||||
|
||||
import {ParseTreeListener} from "antlr4";
|
||||
|
||||
|
||||
import { QueryContext } from "./FilterQueryParser";
|
||||
import { ExpressionContext } from "./FilterQueryParser";
|
||||
import { OrExpressionContext } from "./FilterQueryParser";
|
||||
import { AndExpressionContext } from "./FilterQueryParser";
|
||||
import { UnaryExpressionContext } from "./FilterQueryParser";
|
||||
import { PrimaryContext } from "./FilterQueryParser";
|
||||
import { ComparisonContext } from "./FilterQueryParser";
|
||||
import { InClauseContext } from "./FilterQueryParser";
|
||||
import { NotInClauseContext } from "./FilterQueryParser";
|
||||
import { ValueListContext } from "./FilterQueryParser";
|
||||
import { FullTextContext } from "./FilterQueryParser";
|
||||
import { FunctionCallContext } from "./FilterQueryParser";
|
||||
import { FunctionParamListContext } from "./FilterQueryParser";
|
||||
import { FunctionParamContext } from "./FilterQueryParser";
|
||||
import { ArrayContext } from "./FilterQueryParser";
|
||||
import { ValueContext } from "./FilterQueryParser";
|
||||
import { KeyContext } from "./FilterQueryParser";
|
||||
|
||||
|
||||
/**
|
||||
* This interface defines a complete listener for a parse tree produced by
|
||||
* `FilterQueryParser`.
|
||||
*/
|
||||
export default class FilterQueryListener extends ParseTreeListener {
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.query`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterQuery?: (ctx: QueryContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.query`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitQuery?: (ctx: QueryContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.expression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterExpression?: (ctx: ExpressionContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.expression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitExpression?: (ctx: ExpressionContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.orExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterOrExpression?: (ctx: OrExpressionContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.orExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitOrExpression?: (ctx: OrExpressionContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.andExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterAndExpression?: (ctx: AndExpressionContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.andExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitAndExpression?: (ctx: AndExpressionContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.unaryExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterUnaryExpression?: (ctx: UnaryExpressionContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.unaryExpression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitUnaryExpression?: (ctx: UnaryExpressionContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.primary`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterPrimary?: (ctx: PrimaryContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.primary`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitPrimary?: (ctx: PrimaryContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.comparison`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterComparison?: (ctx: ComparisonContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.comparison`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitComparison?: (ctx: ComparisonContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.inClause`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterInClause?: (ctx: InClauseContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.inClause`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitInClause?: (ctx: InClauseContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.notInClause`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterNotInClause?: (ctx: NotInClauseContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.notInClause`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitNotInClause?: (ctx: NotInClauseContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.valueList`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterValueList?: (ctx: ValueListContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.valueList`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitValueList?: (ctx: ValueListContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.fullText`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterFullText?: (ctx: FullTextContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.fullText`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitFullText?: (ctx: FullTextContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.functionCall`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterFunctionCall?: (ctx: FunctionCallContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.functionCall`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitFunctionCall?: (ctx: FunctionCallContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.functionParamList`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterFunctionParamList?: (ctx: FunctionParamListContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.functionParamList`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitFunctionParamList?: (ctx: FunctionParamListContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.functionParam`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterFunctionParam?: (ctx: FunctionParamContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.functionParam`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitFunctionParam?: (ctx: FunctionParamContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.array`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterArray?: (ctx: ArrayContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.array`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitArray?: (ctx: ArrayContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.value`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterValue?: (ctx: ValueContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.value`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitValue?: (ctx: ValueContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `FilterQueryParser.key`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterKey?: (ctx: KeyContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `FilterQueryParser.key`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitKey?: (ctx: KeyContext) => void;
|
||||
}
|
||||
|
||||
1870
frontend/src/parser/FilterQueryParser.ts
Normal file
1870
frontend/src/parser/FilterQueryParser.ts
Normal file
File diff suppressed because it is too large
Load Diff
136
frontend/src/parser/FilterQueryVisitor.ts
Normal file
136
frontend/src/parser/FilterQueryVisitor.ts
Normal file
@ -0,0 +1,136 @@
|
||||
// Generated from FilterQuery.g4 by ANTLR 4.13.1
|
||||
|
||||
import {ParseTreeVisitor} from 'antlr4';
|
||||
|
||||
|
||||
import { QueryContext } from "./FilterQueryParser";
|
||||
import { ExpressionContext } from "./FilterQueryParser";
|
||||
import { OrExpressionContext } from "./FilterQueryParser";
|
||||
import { AndExpressionContext } from "./FilterQueryParser";
|
||||
import { UnaryExpressionContext } from "./FilterQueryParser";
|
||||
import { PrimaryContext } from "./FilterQueryParser";
|
||||
import { ComparisonContext } from "./FilterQueryParser";
|
||||
import { InClauseContext } from "./FilterQueryParser";
|
||||
import { NotInClauseContext } from "./FilterQueryParser";
|
||||
import { ValueListContext } from "./FilterQueryParser";
|
||||
import { FullTextContext } from "./FilterQueryParser";
|
||||
import { FunctionCallContext } from "./FilterQueryParser";
|
||||
import { FunctionParamListContext } from "./FilterQueryParser";
|
||||
import { FunctionParamContext } from "./FilterQueryParser";
|
||||
import { ArrayContext } from "./FilterQueryParser";
|
||||
import { ValueContext } from "./FilterQueryParser";
|
||||
import { KeyContext } from "./FilterQueryParser";
|
||||
|
||||
|
||||
/**
|
||||
* This interface defines a complete generic visitor for a parse tree produced
|
||||
* by `FilterQueryParser`.
|
||||
*
|
||||
* @param <Result> The return type of the visit operation. Use `void` for
|
||||
* operations with no return type.
|
||||
*/
|
||||
export default class FilterQueryVisitor<Result> extends ParseTreeVisitor<Result> {
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.query`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitQuery?: (ctx: QueryContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.expression`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitExpression?: (ctx: ExpressionContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.orExpression`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitOrExpression?: (ctx: OrExpressionContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.andExpression`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitAndExpression?: (ctx: AndExpressionContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.unaryExpression`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitUnaryExpression?: (ctx: UnaryExpressionContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.primary`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitPrimary?: (ctx: PrimaryContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.comparison`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitComparison?: (ctx: ComparisonContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.inClause`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitInClause?: (ctx: InClauseContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.notInClause`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitNotInClause?: (ctx: NotInClauseContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.valueList`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitValueList?: (ctx: ValueListContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.fullText`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitFullText?: (ctx: FullTextContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.functionCall`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitFunctionCall?: (ctx: FunctionCallContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.functionParamList`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitFunctionParamList?: (ctx: FunctionParamListContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.functionParam`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitFunctionParam?: (ctx: FunctionParamContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.array`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitArray?: (ctx: ArrayContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.value`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitValue?: (ctx: ValueContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `FilterQueryParser.key`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitKey?: (ctx: KeyContext) => Result;
|
||||
}
|
||||
|
||||
94
frontend/src/parser/analyzeQuery.ts
Normal file
94
frontend/src/parser/analyzeQuery.ts
Normal file
@ -0,0 +1,94 @@
|
||||
import FilterQueryLexer from './FilterQueryLexer';
|
||||
import FilterQueryParser from './FilterQueryParser';
|
||||
import { ParseTreeWalker, CharStreams, CommonTokenStream, Token } from 'antlr4';
|
||||
import { isOperatorToken } from 'utils/tokenUtils';
|
||||
import FilterQueryListener from './FilterQueryListener';
|
||||
|
||||
import {
|
||||
KeyContext,
|
||||
ValueContext,
|
||||
ComparisonContext,
|
||||
} from './FilterQueryParser';
|
||||
import { IToken } from 'types/antlrQueryTypes';
|
||||
|
||||
// 👇 Define the token classification
|
||||
type TokenClassification = 'Key' | 'Value' | 'Operator';
|
||||
|
||||
interface TokenInfo {
|
||||
text: string;
|
||||
startIndex: number;
|
||||
stopIndex: number;
|
||||
type: TokenClassification;
|
||||
}
|
||||
|
||||
// 👇 Custom listener to walk the parse tree
|
||||
class TypeTrackingListener implements FilterQueryListener {
|
||||
public tokens: TokenInfo[] = [];
|
||||
|
||||
enterKey(ctx: KeyContext) {
|
||||
const token = ctx.KEY().symbol;
|
||||
this.tokens.push({
|
||||
text: token.text!,
|
||||
startIndex: token.start,
|
||||
stopIndex: token.stop,
|
||||
type: 'Key',
|
||||
});
|
||||
}
|
||||
|
||||
enterValue(ctx: ValueContext) {
|
||||
const token = ctx.start;
|
||||
this.tokens.push({
|
||||
text: token.text!,
|
||||
startIndex: token.start,
|
||||
stopIndex: token.stop,
|
||||
type: 'Value',
|
||||
});
|
||||
}
|
||||
|
||||
enterComparison(ctx: ComparisonContext) {
|
||||
const children = ctx.children || [];
|
||||
for (const child of children) {
|
||||
const token = (child as any).symbol;
|
||||
if (token && isOperatorToken(token.type)) {
|
||||
this.tokens.push({
|
||||
text: token.text!,
|
||||
startIndex: token.start,
|
||||
stopIndex: token.stop,
|
||||
type: 'Operator',
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Required no-op stubs
|
||||
enterEveryRule() {}
|
||||
exitEveryRule() {}
|
||||
exitKey() {}
|
||||
exitValue() {}
|
||||
exitComparison() {}
|
||||
visitTerminal() {}
|
||||
visitErrorNode() {}
|
||||
}
|
||||
|
||||
// 👇 Analyze function
|
||||
export function analyzeQuery(input: string, lastToken: IToken) {
|
||||
input = input.trim();
|
||||
const chars = CharStreams.fromString(input);
|
||||
const lexer = new FilterQueryLexer(chars);
|
||||
const tokens = new CommonTokenStream(lexer);
|
||||
const parser = new FilterQueryParser(tokens);
|
||||
|
||||
const tree = parser.query();
|
||||
|
||||
const listener = new TypeTrackingListener();
|
||||
ParseTreeWalker.DEFAULT.walk(listener, tree);
|
||||
|
||||
const currentToken = listener.tokens.find(
|
||||
(token) =>
|
||||
token.text === lastToken.text &&
|
||||
token.startIndex === lastToken.start &&
|
||||
token.stopIndex === lastToken.stop,
|
||||
);
|
||||
|
||||
return currentToken;
|
||||
}
|
||||
72
frontend/src/types/antlrQueryTypes.ts
Normal file
72
frontend/src/types/antlrQueryTypes.ts
Normal file
@ -0,0 +1,72 @@
|
||||
export interface IValidationResult {
|
||||
isValid: boolean;
|
||||
message: string;
|
||||
errors: IDetailedError[];
|
||||
}
|
||||
|
||||
export interface IToken {
|
||||
type: number;
|
||||
text: string;
|
||||
start: number;
|
||||
stop: number;
|
||||
channel?: number;
|
||||
}
|
||||
|
||||
export interface IQueryPair {
|
||||
key: string;
|
||||
operator: string;
|
||||
value?: string;
|
||||
valueList?: string[];
|
||||
hasNegation?: boolean;
|
||||
isMultiValue?: boolean;
|
||||
position: {
|
||||
keyStart: number;
|
||||
keyEnd: number;
|
||||
operatorStart: number;
|
||||
operatorEnd: number;
|
||||
valueStart?: number;
|
||||
valueEnd?: number;
|
||||
negationStart?: number;
|
||||
negationEnd?: number;
|
||||
};
|
||||
valuesPosition?: {
|
||||
start?: number;
|
||||
end?: number;
|
||||
}[];
|
||||
isComplete: boolean; // true if the pair has all three components
|
||||
}
|
||||
|
||||
export interface IQueryContext {
|
||||
tokenType: number;
|
||||
text: string;
|
||||
start: number;
|
||||
stop: number;
|
||||
currentToken: string;
|
||||
isInValue: boolean;
|
||||
isInKey: boolean;
|
||||
isInNegation: boolean;
|
||||
isInOperator: boolean;
|
||||
isInFunction: boolean;
|
||||
isInConjunction?: boolean;
|
||||
isInParenthesis?: boolean;
|
||||
isInBracketList?: boolean; // For multi-value operators like IN where values are in brackets
|
||||
keyToken?: string;
|
||||
operatorToken?: string;
|
||||
valueToken?: string;
|
||||
queryPairs?: IQueryPair[];
|
||||
currentPair?: IQueryPair | null;
|
||||
}
|
||||
|
||||
export interface IDetailedError {
|
||||
message: string;
|
||||
line: number;
|
||||
column: number;
|
||||
offendingSymbol?: string;
|
||||
expectedTokens?: string[];
|
||||
}
|
||||
|
||||
export interface ASTNode {
|
||||
type: string;
|
||||
value?: string;
|
||||
children?: ASTNode[];
|
||||
}
|
||||
47
frontend/src/types/api/querySuggestions/types.ts
Normal file
47
frontend/src/types/api/querySuggestions/types.ts
Normal file
@ -0,0 +1,47 @@
|
||||
import { QUERY_BUILDER_KEY_TYPES } from 'constants/antlrQueryConstants';
|
||||
|
||||
export interface QueryKeyDataSuggestionsProps {
|
||||
label: string;
|
||||
type: string;
|
||||
info?: string;
|
||||
apply?: string;
|
||||
detail?: string;
|
||||
fieldContext?: 'resource' | 'scope' | 'attribute' | 'span';
|
||||
fieldDataType?: QUERY_BUILDER_KEY_TYPES;
|
||||
name: string;
|
||||
signal: 'traces' | 'logs' | 'metrics';
|
||||
}
|
||||
|
||||
export interface QueryKeySuggestionsResponseProps {
|
||||
status: string;
|
||||
data: {
|
||||
complete: boolean;
|
||||
keys: {
|
||||
[key: string]: QueryKeyDataSuggestionsProps[];
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
export interface QueryKeyRequestProps {
|
||||
signal: 'traces' | 'logs' | 'metrics';
|
||||
searchText: string;
|
||||
fieldContext?: 'resource' | 'scope' | 'attribute' | 'span';
|
||||
fieldDataType?: QUERY_BUILDER_KEY_TYPES;
|
||||
metricName?: string;
|
||||
}
|
||||
|
||||
export interface QueryKeyValueSuggestionsProps {
|
||||
id: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface QueryKeyValueSuggestionsResponseProps {
|
||||
status: string;
|
||||
data: QueryKeyValueSuggestionsProps[];
|
||||
}
|
||||
|
||||
export interface QueryKeyValueRequestProps {
|
||||
signal: 'traces' | 'logs' | 'metrics';
|
||||
key: string;
|
||||
searchText: string;
|
||||
}
|
||||
421
frontend/src/types/api/v5/queryRange.ts
Normal file
421
frontend/src/types/api/v5/queryRange.ts
Normal file
@ -0,0 +1,421 @@
|
||||
// ===================== Base Types =====================
|
||||
|
||||
export type Step = string | number; // Duration string (e.g., "30s") or seconds as number
|
||||
|
||||
export type RequestType =
|
||||
| 'scalar'
|
||||
| 'time_series'
|
||||
| 'raw'
|
||||
| 'distribution'
|
||||
| '';
|
||||
|
||||
export type QueryType =
|
||||
| 'builder_query'
|
||||
| 'builder_formula'
|
||||
| 'builder_sub_query'
|
||||
| 'builder_join'
|
||||
| 'clickhouse_sql'
|
||||
| 'promql';
|
||||
|
||||
export type OrderDirection = 'asc' | 'desc';
|
||||
|
||||
export type JoinType = 'inner' | 'left' | 'right' | 'full' | 'cross';
|
||||
|
||||
export type SignalType = 'traces' | 'logs' | 'metrics';
|
||||
|
||||
export type DataType = 'string' | 'number' | 'boolean' | 'array';
|
||||
|
||||
export type FieldType =
|
||||
| 'resource'
|
||||
| 'attribute'
|
||||
| 'instrumentation_library'
|
||||
| 'span';
|
||||
|
||||
export type FieldContext =
|
||||
| 'metric'
|
||||
| 'log'
|
||||
| 'span'
|
||||
| 'trace'
|
||||
| 'resource'
|
||||
| 'scope'
|
||||
| 'attribute'
|
||||
| 'event'
|
||||
| '';
|
||||
|
||||
export type FieldDataType =
|
||||
| 'string'
|
||||
| 'bool'
|
||||
| 'float64'
|
||||
| 'int64'
|
||||
| 'number'
|
||||
| '[]string'
|
||||
| '[]float64'
|
||||
| '[]bool'
|
||||
| '[]int64'
|
||||
| '[]number'
|
||||
| '';
|
||||
|
||||
export type FunctionName =
|
||||
| 'cutOffMin'
|
||||
| 'cutOffMax'
|
||||
| 'clampMin'
|
||||
| 'clampMax'
|
||||
| 'absolute'
|
||||
| 'runningDiff'
|
||||
| 'log2'
|
||||
| 'log10'
|
||||
| 'cumSum'
|
||||
| 'ewma3'
|
||||
| 'ewma5'
|
||||
| 'ewma7'
|
||||
| 'median3'
|
||||
| 'median5'
|
||||
| 'median7'
|
||||
| 'timeShift'
|
||||
| 'anomaly';
|
||||
|
||||
export type Temporality = 'cumulative' | 'delta' | '';
|
||||
|
||||
export type MetricType =
|
||||
| 'gauge'
|
||||
| 'sum'
|
||||
| 'histogram'
|
||||
| 'summary'
|
||||
| 'exponential_histogram'
|
||||
| '';
|
||||
|
||||
export type TimeAggregation =
|
||||
| 'latest'
|
||||
| 'sum'
|
||||
| 'avg'
|
||||
| 'min'
|
||||
| 'max'
|
||||
| 'count'
|
||||
| 'count_distinct'
|
||||
| 'rate'
|
||||
| 'increase'
|
||||
| '';
|
||||
|
||||
export type SpaceAggregation =
|
||||
| 'sum'
|
||||
| 'avg'
|
||||
| 'min'
|
||||
| 'max'
|
||||
| 'count'
|
||||
| 'p50'
|
||||
| 'p75'
|
||||
| 'p90'
|
||||
| 'p95'
|
||||
| 'p99'
|
||||
| '';
|
||||
|
||||
export type ColumnType = 'group' | 'aggregation';
|
||||
|
||||
// ===================== Variable Types =====================
|
||||
|
||||
export type VariableType = 'query' | 'dynamic' | 'custom' | 'text';
|
||||
|
||||
export interface VariableItem {
|
||||
type?: VariableType;
|
||||
value: any; // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
}
|
||||
|
||||
// ===================== Core Interface Types =====================
|
||||
|
||||
export interface TelemetryFieldKey {
|
||||
name: string;
|
||||
description?: string;
|
||||
unit?: string;
|
||||
signal?: SignalType;
|
||||
fieldContext?: FieldContext;
|
||||
fieldDataType?: FieldDataType;
|
||||
materialized?: boolean;
|
||||
isColumn?: boolean;
|
||||
isJSON?: boolean;
|
||||
isIndexed?: boolean;
|
||||
}
|
||||
|
||||
export interface Filter {
|
||||
expression: string;
|
||||
}
|
||||
|
||||
export interface Having {
|
||||
expression: string;
|
||||
}
|
||||
|
||||
export type GroupByKey = TelemetryFieldKey;
|
||||
|
||||
export interface OrderBy {
|
||||
key: TelemetryFieldKey;
|
||||
direction: OrderDirection;
|
||||
}
|
||||
|
||||
export interface LimitBy {
|
||||
keys: string[];
|
||||
value: string;
|
||||
}
|
||||
|
||||
export interface QueryRef {
|
||||
name: string;
|
||||
}
|
||||
|
||||
export interface FunctionArg {
|
||||
name?: string;
|
||||
value: string | number;
|
||||
}
|
||||
|
||||
export interface QueryFunction {
|
||||
name: FunctionName;
|
||||
args?: FunctionArg[];
|
||||
}
|
||||
|
||||
// ===================== Aggregation Types =====================
|
||||
|
||||
export interface TraceAggregation {
|
||||
expression: string;
|
||||
alias?: string;
|
||||
}
|
||||
|
||||
export interface LogAggregation {
|
||||
expression: string;
|
||||
alias?: string;
|
||||
}
|
||||
|
||||
export interface MetricAggregation {
|
||||
metricName: string;
|
||||
temporality: Temporality;
|
||||
timeAggregation: TimeAggregation;
|
||||
spaceAggregation: SpaceAggregation;
|
||||
reduceTo?: string;
|
||||
}
|
||||
|
||||
export interface SecondaryAggregation {
|
||||
stepInterval?: Step;
|
||||
expression: string;
|
||||
alias?: string;
|
||||
groupBy?: GroupByKey[];
|
||||
order?: OrderBy[];
|
||||
limit?: number;
|
||||
limitBy?: LimitBy;
|
||||
}
|
||||
|
||||
// ===================== Query Types =====================
|
||||
|
||||
export interface BaseBuilderQuery {
|
||||
name?: string;
|
||||
stepInterval?: Step;
|
||||
disabled?: boolean;
|
||||
filter?: Filter;
|
||||
groupBy?: GroupByKey[];
|
||||
order?: OrderBy[];
|
||||
selectFields?: TelemetryFieldKey[];
|
||||
limit?: number;
|
||||
limitBy?: LimitBy;
|
||||
offset?: number;
|
||||
cursor?: string;
|
||||
having?: Having;
|
||||
secondaryAggregations?: SecondaryAggregation[];
|
||||
functions?: QueryFunction[];
|
||||
legend?: string;
|
||||
}
|
||||
|
||||
export interface TraceBuilderQuery extends BaseBuilderQuery {
|
||||
signal: 'traces';
|
||||
aggregations?: TraceAggregation[];
|
||||
}
|
||||
|
||||
export interface LogBuilderQuery extends BaseBuilderQuery {
|
||||
signal: 'logs';
|
||||
aggregations?: LogAggregation[];
|
||||
}
|
||||
|
||||
export interface MetricBuilderQuery extends BaseBuilderQuery {
|
||||
signal: 'metrics';
|
||||
aggregations?: MetricAggregation[];
|
||||
}
|
||||
|
||||
export type BuilderQuery =
|
||||
| TraceBuilderQuery
|
||||
| LogBuilderQuery
|
||||
| MetricBuilderQuery;
|
||||
|
||||
export interface QueryBuilderFormula {
|
||||
name: string;
|
||||
expression: string;
|
||||
functions?: QueryFunction[];
|
||||
order?: OrderBy[];
|
||||
limit?: number;
|
||||
having?: Having;
|
||||
}
|
||||
|
||||
export interface QueryBuilderJoin {
|
||||
name: string;
|
||||
disabled?: boolean;
|
||||
left: QueryRef;
|
||||
right: QueryRef;
|
||||
type: JoinType;
|
||||
on: string;
|
||||
aggregations?: any[]; // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
selectFields?: TelemetryFieldKey[];
|
||||
filter?: Filter;
|
||||
groupBy?: GroupByKey[];
|
||||
having?: Having;
|
||||
order?: OrderBy[];
|
||||
limit?: number;
|
||||
secondaryAggregations?: SecondaryAggregation[];
|
||||
functions?: QueryFunction[];
|
||||
}
|
||||
|
||||
export interface PromQuery {
|
||||
name: string;
|
||||
query: string;
|
||||
disabled?: boolean;
|
||||
step?: Step;
|
||||
stats?: boolean;
|
||||
}
|
||||
|
||||
export interface ClickHouseQuery {
|
||||
name: string;
|
||||
query: string;
|
||||
disabled?: boolean;
|
||||
}
|
||||
|
||||
// ===================== Query Envelope =====================
|
||||
|
||||
export interface QueryEnvelope {
|
||||
type: QueryType;
|
||||
spec:
|
||||
| BuilderQuery // Will be same for both builder_query and builder_sub_query
|
||||
| QueryBuilderFormula
|
||||
| QueryBuilderJoin
|
||||
| PromQuery
|
||||
| ClickHouseQuery;
|
||||
}
|
||||
|
||||
export interface CompositeQuery {
|
||||
queries: QueryEnvelope[];
|
||||
}
|
||||
|
||||
// ===================== Request Types =====================
|
||||
|
||||
export interface QueryRangeRequestV5 {
|
||||
schemaVersion: string;
|
||||
start: number; // epoch milliseconds
|
||||
end: number; // epoch milliseconds
|
||||
requestType: RequestType;
|
||||
compositeQuery: CompositeQuery;
|
||||
variables?: Record<string, VariableItem>;
|
||||
formatOptions?: {
|
||||
formatTableResultForUI: boolean;
|
||||
fillGaps?: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
// ===================== Response Types =====================
|
||||
|
||||
export interface ExecStats {
|
||||
rowsScanned: number;
|
||||
bytesScanned: number;
|
||||
durationMs: number;
|
||||
}
|
||||
|
||||
export interface Label {
|
||||
key: TelemetryFieldKey;
|
||||
value: any; // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
}
|
||||
|
||||
export interface Bucket {
|
||||
step: number;
|
||||
}
|
||||
|
||||
export interface TimeSeriesValue {
|
||||
timestamp: number; // Unix timestamp in milliseconds
|
||||
value: number;
|
||||
values?: number[]; // For heatmap type charts
|
||||
bucket?: Bucket;
|
||||
partial?: boolean;
|
||||
}
|
||||
|
||||
export interface TimeSeries {
|
||||
labels?: Label[];
|
||||
values: TimeSeriesValue[];
|
||||
}
|
||||
|
||||
export interface AggregationBucket {
|
||||
index: number;
|
||||
alias: string;
|
||||
meta: Record<string, any>; // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
series: TimeSeries[];
|
||||
}
|
||||
|
||||
export interface TimeSeriesData {
|
||||
queryName: string;
|
||||
aggregations: AggregationBucket[];
|
||||
}
|
||||
|
||||
export interface ColumnDescriptor extends TelemetryFieldKey {
|
||||
queryName: string;
|
||||
aggregationIndex: number;
|
||||
columnType: ColumnType;
|
||||
meta?: {
|
||||
unit?: string;
|
||||
};
|
||||
}
|
||||
|
||||
export interface ScalarData {
|
||||
columns: ColumnDescriptor[];
|
||||
data: any[][]; // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
}
|
||||
|
||||
export interface RawRow {
|
||||
timestamp: string; // ISO date-time
|
||||
data: Record<string, any>; // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
}
|
||||
|
||||
export interface RawData {
|
||||
queryName: string;
|
||||
nextCursor?: string;
|
||||
rows: RawRow[];
|
||||
}
|
||||
|
||||
export interface DistributionData {
|
||||
// Structure to be defined based on requirements
|
||||
[key: string]: any; // eslint-disable-line @typescript-eslint/no-explicit-any
|
||||
}
|
||||
|
||||
// Response data structures with results array
|
||||
export interface TimeSeriesResponseData {
|
||||
results: TimeSeriesData[];
|
||||
}
|
||||
|
||||
export interface ScalarResponseData {
|
||||
results: ScalarData[];
|
||||
}
|
||||
|
||||
export interface RawResponseData {
|
||||
results: RawData[];
|
||||
}
|
||||
|
||||
export interface DistributionResponseData {
|
||||
results: DistributionData[];
|
||||
}
|
||||
|
||||
export type QueryRangeDataV5 =
|
||||
| TimeSeriesResponseData
|
||||
| ScalarResponseData
|
||||
| RawResponseData
|
||||
| DistributionResponseData;
|
||||
|
||||
export interface QueryRangeResponseV5 {
|
||||
type: RequestType;
|
||||
data: QueryRangeDataV5;
|
||||
meta: ExecStats;
|
||||
}
|
||||
|
||||
// ===================== Payload Types for API Functions =====================
|
||||
|
||||
export type QueryRangePayloadV5 = QueryRangeRequestV5;
|
||||
|
||||
export interface MetricRangePayloadV5 {
|
||||
data: QueryRangeResponseV5;
|
||||
}
|
||||
139
frontend/src/utils/aggregationConverter.ts
Normal file
139
frontend/src/utils/aggregationConverter.ts
Normal file
@ -0,0 +1,139 @@
|
||||
import { createAggregation } from 'api/v5/queryRange/prepareQueryRangePayloadV5';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
LogAggregation,
|
||||
MetricAggregation,
|
||||
TraceAggregation,
|
||||
} from 'types/api/v5/queryRange';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
/**
|
||||
* Converts QueryV2 aggregations to BaseAutocompleteData format
|
||||
* for compatibility with existing OrderByFilter component
|
||||
*/
|
||||
export function convertAggregationsToBaseAutocompleteData(
|
||||
aggregations:
|
||||
| TraceAggregation[]
|
||||
| LogAggregation[]
|
||||
| MetricAggregation[]
|
||||
| undefined,
|
||||
dataSource: DataSource,
|
||||
metricName?: string,
|
||||
spaceAggregation?: string,
|
||||
): BaseAutocompleteData[] {
|
||||
// If no aggregations provided, return default based on data source
|
||||
if (!aggregations || aggregations.length === 0) {
|
||||
switch (dataSource) {
|
||||
case DataSource.METRICS:
|
||||
return [
|
||||
{
|
||||
id: uuid(),
|
||||
dataType: DataTypes.Float64,
|
||||
isColumn: false,
|
||||
type: '',
|
||||
isJSON: false,
|
||||
key: `${spaceAggregation || 'avg'}(${metricName || 'metric'})`,
|
||||
},
|
||||
];
|
||||
case DataSource.TRACES:
|
||||
case DataSource.LOGS:
|
||||
default:
|
||||
return [
|
||||
{
|
||||
id: uuid(),
|
||||
dataType: DataTypes.Float64,
|
||||
isColumn: false,
|
||||
type: '',
|
||||
isJSON: false,
|
||||
key: 'count()',
|
||||
},
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
return aggregations.map((agg) => {
|
||||
if ('expression' in agg) {
|
||||
// TraceAggregation or LogAggregation
|
||||
const { expression } = agg;
|
||||
const alias = 'alias' in agg ? agg.alias : '';
|
||||
const displayKey = alias || expression;
|
||||
|
||||
return {
|
||||
id: uuid(),
|
||||
dataType: DataTypes.Float64,
|
||||
isColumn: false,
|
||||
type: '',
|
||||
isJSON: false,
|
||||
key: displayKey,
|
||||
};
|
||||
}
|
||||
// MetricAggregation
|
||||
const {
|
||||
metricName: aggMetricName,
|
||||
spaceAggregation: aggSpaceAggregation,
|
||||
} = agg;
|
||||
const displayKey = `${aggSpaceAggregation}(${aggMetricName})`;
|
||||
|
||||
return {
|
||||
id: uuid(),
|
||||
dataType: DataTypes.Float64,
|
||||
isColumn: false,
|
||||
type: '',
|
||||
isJSON: false,
|
||||
key: displayKey,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to get aggregation options for OrderByFilter
|
||||
* This creates BaseAutocompleteData that can be used with the existing OrderByFilter
|
||||
*/
|
||||
export function getAggregationOptionsForOrderBy(query: {
|
||||
aggregations?: TraceAggregation[] | LogAggregation[] | MetricAggregation[];
|
||||
dataSource: DataSource;
|
||||
aggregateAttribute?: { key: string };
|
||||
spaceAggregation?: string;
|
||||
}): BaseAutocompleteData[] {
|
||||
const {
|
||||
aggregations,
|
||||
dataSource,
|
||||
aggregateAttribute,
|
||||
spaceAggregation,
|
||||
} = query;
|
||||
|
||||
return convertAggregationsToBaseAutocompleteData(
|
||||
aggregations,
|
||||
dataSource,
|
||||
aggregateAttribute?.key,
|
||||
spaceAggregation,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Enhanced function that uses createAggregation to parse aggregations first
|
||||
* then converts them to BaseAutocompleteData format for OrderByFilter
|
||||
*/
|
||||
export function getParsedAggregationOptionsForOrderBy(query: {
|
||||
aggregations?: TraceAggregation[] | LogAggregation[] | MetricAggregation[];
|
||||
dataSource: DataSource;
|
||||
aggregateAttribute?: { key: string };
|
||||
spaceAggregation?: string;
|
||||
timeAggregation?: string;
|
||||
temporality?: string;
|
||||
}): BaseAutocompleteData[] {
|
||||
// First, use createAggregation to parse the aggregations
|
||||
const parsedAggregations = createAggregation(query);
|
||||
|
||||
// Then convert the parsed aggregations to BaseAutocompleteData format
|
||||
return convertAggregationsToBaseAutocompleteData(
|
||||
parsedAggregations,
|
||||
query.dataSource,
|
||||
query.aggregateAttribute?.key,
|
||||
query.spaceAggregation,
|
||||
);
|
||||
}
|
||||
895
frontend/src/utils/antlrQueryUtils.ts
Normal file
895
frontend/src/utils/antlrQueryUtils.ts
Normal file
@ -0,0 +1,895 @@
|
||||
/* eslint-disable sonarjs/no-collapsible-if */
|
||||
/* eslint-disable no-continue */
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
import { CharStreams, CommonTokenStream } from 'antlr4';
|
||||
import FilterQueryLexer from 'parser/FilterQueryLexer';
|
||||
import FilterQueryParser from 'parser/FilterQueryParser';
|
||||
import {
|
||||
IDetailedError,
|
||||
IQueryContext,
|
||||
IToken,
|
||||
IValidationResult,
|
||||
} from 'types/antlrQueryTypes';
|
||||
|
||||
// Custom error listener to capture ANTLR errors
|
||||
class QueryErrorListener {
|
||||
private errors: IDetailedError[] = [];
|
||||
|
||||
syntaxError(
|
||||
_recognizer: any,
|
||||
offendingSymbol: any,
|
||||
line: number,
|
||||
column: number,
|
||||
msg: string,
|
||||
): void {
|
||||
// For unterminated quotes, we only want to show one error
|
||||
if (this.hasUnterminatedQuoteError() && msg.includes('expecting')) {
|
||||
return;
|
||||
}
|
||||
|
||||
const error: IDetailedError = {
|
||||
message: msg,
|
||||
line,
|
||||
column,
|
||||
offendingSymbol: offendingSymbol?.text || String(offendingSymbol),
|
||||
};
|
||||
|
||||
// Extract expected tokens if available
|
||||
if (msg.includes('expecting')) {
|
||||
const expectedTokens = msg
|
||||
.split('expecting')[1]
|
||||
.trim()
|
||||
.split(',')
|
||||
.map((token) => token.trim());
|
||||
error.expectedTokens = expectedTokens;
|
||||
}
|
||||
|
||||
// Check if this is a duplicate error (same location and similar message)
|
||||
const isDuplicate = this.errors.some(
|
||||
(e) =>
|
||||
e.line === line &&
|
||||
e.column === column &&
|
||||
this.isSimilarError(e.message, msg),
|
||||
);
|
||||
|
||||
if (!isDuplicate) {
|
||||
this.errors.push(error);
|
||||
}
|
||||
}
|
||||
|
||||
private hasUnterminatedQuoteError(): boolean {
|
||||
return this.errors.some(
|
||||
(error) =>
|
||||
error.message.includes('unterminated') ||
|
||||
(error.message.includes('missing') && error.message.includes("'")),
|
||||
);
|
||||
}
|
||||
|
||||
private isSimilarError = (msg1: string, msg2: string): boolean => {
|
||||
// Consider errors similar if they're for the same core issue
|
||||
const normalize = (msg: string): string =>
|
||||
msg.toLowerCase().replace(/['"`]/g, 'quote').replace(/\s+/g, ' ').trim();
|
||||
|
||||
return normalize(msg1) === normalize(msg2);
|
||||
};
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
reportAmbiguity = (): void => {};
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
reportAttemptingFullContext = (): void => {};
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-empty-function
|
||||
reportContextSensitivity = (): void => {};
|
||||
|
||||
getErrors(): IDetailedError[] {
|
||||
return this.errors;
|
||||
}
|
||||
|
||||
hasErrors(): boolean {
|
||||
return this.errors.length > 0;
|
||||
}
|
||||
|
||||
getFormattedErrors(): string[] {
|
||||
return this.errors.map((error) => {
|
||||
const {
|
||||
offendingSymbol,
|
||||
expectedTokens,
|
||||
message: errorMessage,
|
||||
line,
|
||||
column,
|
||||
} = error;
|
||||
|
||||
let message = `Line ${line}:${column} - ${errorMessage}`;
|
||||
|
||||
if (offendingSymbol && offendingSymbol !== 'undefined') {
|
||||
message += `\n Symbol: '${offendingSymbol}'`;
|
||||
}
|
||||
|
||||
if (expectedTokens && expectedTokens.length > 0) {
|
||||
message += `\n Expected: ${expectedTokens.join(', ')}`;
|
||||
}
|
||||
|
||||
return message;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export const validateQuery = (query: string): IValidationResult => {
|
||||
// Empty query is considered invalid
|
||||
if (!query.trim()) {
|
||||
return {
|
||||
isValid: true,
|
||||
message: 'Query is empty',
|
||||
errors: [],
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const errorListener = new QueryErrorListener();
|
||||
const inputStream = CharStreams.fromString(query);
|
||||
|
||||
// Setup lexer
|
||||
const lexer = new FilterQueryLexer(inputStream);
|
||||
lexer.removeErrorListeners(); // Remove default error listeners
|
||||
lexer.addErrorListener(errorListener);
|
||||
|
||||
// Setup parser
|
||||
const tokenStream = new CommonTokenStream(lexer);
|
||||
const parser = new FilterQueryParser(tokenStream);
|
||||
parser.removeErrorListeners(); // Remove default error listeners
|
||||
parser.addErrorListener(errorListener);
|
||||
|
||||
// Try parsing
|
||||
parser.query();
|
||||
|
||||
// Check if any errors were captured
|
||||
if (errorListener.hasErrors()) {
|
||||
return {
|
||||
isValid: false,
|
||||
message: 'Query syntax error',
|
||||
errors: errorListener.getErrors(),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
isValid: true,
|
||||
message: 'Query is valid!',
|
||||
errors: [],
|
||||
};
|
||||
} catch (error) {
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : 'Invalid query syntax';
|
||||
|
||||
const detailedError: IDetailedError = {
|
||||
message: errorMessage,
|
||||
line: 0,
|
||||
column: 0,
|
||||
offendingSymbol: '',
|
||||
expectedTokens: [],
|
||||
};
|
||||
return {
|
||||
isValid: false,
|
||||
message: 'Invalid query syntax',
|
||||
errors: [detailedError],
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
// Helper function to find key-operator-value triplets in token stream
|
||||
export function findKeyOperatorValueTriplet(
|
||||
allTokens: IToken[],
|
||||
currentToken: IToken,
|
||||
isInKey: boolean,
|
||||
isInOperator: boolean,
|
||||
isInValue: boolean,
|
||||
): { keyToken?: string; operatorToken?: string; valueToken?: string } {
|
||||
// Find current token index in allTokens
|
||||
let currentTokenIndex = -1;
|
||||
for (let i = 0; i < allTokens.length; i++) {
|
||||
if (
|
||||
allTokens[i].start === currentToken.start &&
|
||||
allTokens[i].stop === currentToken.stop &&
|
||||
allTokens[i].type === currentToken.type
|
||||
) {
|
||||
currentTokenIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (currentTokenIndex === -1) return {};
|
||||
|
||||
// Initialize result with empty object
|
||||
const result: {
|
||||
keyToken?: string;
|
||||
operatorToken?: string;
|
||||
valueToken?: string;
|
||||
} = {};
|
||||
|
||||
if (isInKey) {
|
||||
// When in key context, we only know the key
|
||||
result.keyToken = currentToken.text;
|
||||
} else if (isInOperator) {
|
||||
// When in operator context, we know the operator and can find the preceding key
|
||||
result.operatorToken = currentToken.text;
|
||||
|
||||
// Look backward for key
|
||||
for (let i = currentTokenIndex - 1; i >= 0; i--) {
|
||||
const token = allTokens[i];
|
||||
// Skip whitespace and other hidden channel tokens
|
||||
if (token.channel !== 0) continue;
|
||||
|
||||
if (token.type === FilterQueryLexer.KEY) {
|
||||
result.keyToken = token.text;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else if (isInValue) {
|
||||
// When in value context, we know the value and can find the preceding operator and key
|
||||
result.valueToken = currentToken.text;
|
||||
|
||||
let foundOperator = false;
|
||||
|
||||
// Look backward for operator and key
|
||||
for (let i = currentTokenIndex - 1; i >= 0; i--) {
|
||||
const token = allTokens[i];
|
||||
// Skip whitespace and other hidden channel tokens
|
||||
if (token.channel !== 0) continue;
|
||||
|
||||
// If we haven't found an operator yet, check for operator
|
||||
if (
|
||||
!foundOperator &&
|
||||
[
|
||||
FilterQueryLexer.EQUALS,
|
||||
FilterQueryLexer.NOT_EQUALS,
|
||||
FilterQueryLexer.NEQ,
|
||||
FilterQueryLexer.LT,
|
||||
FilterQueryLexer.LE,
|
||||
FilterQueryLexer.GT,
|
||||
FilterQueryLexer.GE,
|
||||
FilterQueryLexer.LIKE,
|
||||
// FilterQueryLexer.NOT_LIKE,
|
||||
FilterQueryLexer.ILIKE,
|
||||
// FilterQueryLexer.NOT_ILIKE,
|
||||
FilterQueryLexer.BETWEEN,
|
||||
FilterQueryLexer.EXISTS,
|
||||
FilterQueryLexer.REGEXP,
|
||||
FilterQueryLexer.CONTAINS,
|
||||
FilterQueryLexer.IN,
|
||||
FilterQueryLexer.NOT,
|
||||
].includes(token.type)
|
||||
) {
|
||||
result.operatorToken = token.text;
|
||||
foundOperator = true;
|
||||
}
|
||||
// If we already found an operator and this is a key, record it
|
||||
else if (foundOperator && token.type === FilterQueryLexer.KEY) {
|
||||
result.keyToken = token.text;
|
||||
break; // We found our triplet
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export function getQueryContextAtCursor(
|
||||
query: string,
|
||||
cursorIndex: number,
|
||||
): IQueryContext {
|
||||
try {
|
||||
// Create input stream and lexer
|
||||
const input = query || '';
|
||||
const chars = CharStreams.fromString(input);
|
||||
const lexer = new FilterQueryLexer(chars);
|
||||
|
||||
// Create token stream and force token generation
|
||||
const tokenStream = new CommonTokenStream(lexer);
|
||||
tokenStream.fill();
|
||||
|
||||
// Get all tokens including whitespace
|
||||
const allTokens = tokenStream.tokens as IToken[];
|
||||
|
||||
// Find exact token at cursor, including whitespace
|
||||
let exactToken: IToken | null = null;
|
||||
let previousToken: IToken | null = null;
|
||||
let nextToken: IToken | null = null;
|
||||
|
||||
// Handle cursor at the very end of input
|
||||
if (cursorIndex === input.length && allTokens.length > 0) {
|
||||
const lastRealToken = allTokens
|
||||
.filter((t) => t.type !== FilterQueryLexer.EOF)
|
||||
.pop();
|
||||
if (lastRealToken) {
|
||||
exactToken = lastRealToken;
|
||||
previousToken =
|
||||
allTokens.filter((t) => t.stop < lastRealToken.start).pop() || null;
|
||||
}
|
||||
} else {
|
||||
// Normal token search
|
||||
for (let i = 0; i < allTokens.length; i++) {
|
||||
const token = allTokens[i];
|
||||
// Skip EOF token in normal search
|
||||
if (token.type === FilterQueryLexer.EOF) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if cursor is within token bounds (inclusive)
|
||||
if (token.start <= cursorIndex && cursorIndex <= token.stop + 1) {
|
||||
exactToken = token;
|
||||
previousToken = i > 0 ? allTokens[i - 1] : null;
|
||||
nextToken = i < allTokens.length - 1 ? allTokens[i + 1] : null;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If cursor is between tokens, find surrounding tokens
|
||||
if (!exactToken) {
|
||||
for (let i = 0; i < allTokens.length - 1; i++) {
|
||||
const current = allTokens[i];
|
||||
const next = allTokens[i + 1];
|
||||
if (current.type === FilterQueryLexer.EOF) {
|
||||
continue;
|
||||
}
|
||||
if (next.type === FilterQueryLexer.EOF) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (current.stop + 1 < cursorIndex && cursorIndex < next.start) {
|
||||
previousToken = current;
|
||||
nextToken = next;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Determine the context based on cursor position and surrounding tokens
|
||||
let currentToken: IToken | null = null;
|
||||
|
||||
if (exactToken) {
|
||||
// If cursor is in a non-whitespace token, use that
|
||||
if (exactToken.channel === 0) {
|
||||
currentToken = exactToken;
|
||||
} else {
|
||||
// If in whitespace, use the previous non-whitespace token
|
||||
currentToken = previousToken?.channel === 0 ? previousToken : nextToken;
|
||||
}
|
||||
} else if (previousToken?.channel === 0) {
|
||||
// If between tokens, prefer the previous non-whitespace token
|
||||
currentToken = previousToken;
|
||||
} else if (nextToken?.channel === 0) {
|
||||
// Otherwise use the next non-whitespace token
|
||||
currentToken = nextToken;
|
||||
}
|
||||
|
||||
// If still no token (empty query or all whitespace), return default context
|
||||
if (!currentToken) {
|
||||
// Handle transitions based on spaces and current state
|
||||
if (query.trim() === '') {
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInValue: false,
|
||||
isInKey: true, // Default to key context when input is empty
|
||||
isInNegation: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
};
|
||||
}
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInValue: false,
|
||||
isInNegation: false,
|
||||
isInKey: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
};
|
||||
}
|
||||
|
||||
// Determine if the current token is a conjunction (AND or OR)
|
||||
const isInConjunction = [FilterQueryLexer.AND, FilterQueryLexer.OR].includes(
|
||||
currentToken.type,
|
||||
);
|
||||
|
||||
// Determine if the current token is a parenthesis or bracket
|
||||
const isInParenthesis = [
|
||||
FilterQueryLexer.LPAREN,
|
||||
FilterQueryLexer.RPAREN,
|
||||
FilterQueryLexer.LBRACK,
|
||||
FilterQueryLexer.RBRACK,
|
||||
].includes(currentToken.type);
|
||||
|
||||
// Determine the context based on the token type
|
||||
const isInValue = [
|
||||
FilterQueryLexer.QUOTED_TEXT,
|
||||
FilterQueryLexer.NUMBER,
|
||||
FilterQueryLexer.BOOL,
|
||||
].includes(currentToken.type);
|
||||
|
||||
const isInKey = currentToken.type === FilterQueryLexer.KEY;
|
||||
|
||||
const isInNegation = currentToken.type === FilterQueryLexer.NOT;
|
||||
|
||||
const isInOperator = [
|
||||
FilterQueryLexer.EQUALS,
|
||||
FilterQueryLexer.NOT_EQUALS,
|
||||
FilterQueryLexer.NEQ,
|
||||
FilterQueryLexer.LT,
|
||||
FilterQueryLexer.LE,
|
||||
FilterQueryLexer.GT,
|
||||
FilterQueryLexer.GE,
|
||||
FilterQueryLexer.LIKE,
|
||||
// FilterQueryLexer.NOT_LIKE,
|
||||
FilterQueryLexer.ILIKE,
|
||||
// FilterQueryLexer.NOT_ILIKE,
|
||||
FilterQueryLexer.BETWEEN,
|
||||
FilterQueryLexer.EXISTS,
|
||||
FilterQueryLexer.REGEXP,
|
||||
FilterQueryLexer.CONTAINS,
|
||||
FilterQueryLexer.IN,
|
||||
FilterQueryLexer.NOT,
|
||||
].includes(currentToken.type);
|
||||
|
||||
const isInFunction = [
|
||||
FilterQueryLexer.HAS,
|
||||
FilterQueryLexer.HASANY,
|
||||
FilterQueryLexer.HASALL,
|
||||
// FilterQueryLexer.HASNONE,
|
||||
].includes(currentToken.type);
|
||||
|
||||
// Get the context-related tokens (key, operator, value)
|
||||
const relationTokens = findKeyOperatorValueTriplet(
|
||||
allTokens,
|
||||
currentToken,
|
||||
isInKey,
|
||||
isInOperator,
|
||||
isInValue,
|
||||
);
|
||||
|
||||
// Handle transitions based on spaces
|
||||
// When a user adds a space after a token, change the context accordingly
|
||||
if (
|
||||
currentToken &&
|
||||
cursorIndex === currentToken.stop + 2 &&
|
||||
query[currentToken.stop + 1] === ' '
|
||||
) {
|
||||
// User added a space right after this token
|
||||
|
||||
if (isInKey) {
|
||||
// After a key + space, we should be in operator context
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: false,
|
||||
isInKey: false,
|
||||
isInNegation: false,
|
||||
isInOperator: true,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (isInOperator) {
|
||||
// After an operator + space, we should be in value context
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: true,
|
||||
isInKey: false,
|
||||
isInNegation: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (isInValue) {
|
||||
// After a value + space, we should be in conjunction context
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: false,
|
||||
isInKey: false,
|
||||
isInNegation: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: true,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (isInConjunction) {
|
||||
// After a conjunction + space, we should be in key context again
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: false,
|
||||
isInNegation: false,
|
||||
isInKey: true,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (isInParenthesis) {
|
||||
// After a parenthesis/bracket + space, determine context based on which bracket
|
||||
if (currentToken.type === FilterQueryLexer.LPAREN) {
|
||||
// After an opening parenthesis + space, we should be in key context
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: false,
|
||||
isInNegation: false,
|
||||
isInKey: true,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens,
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
currentToken.type === FilterQueryLexer.RPAREN ||
|
||||
currentToken.type === FilterQueryLexer.RBRACK
|
||||
) {
|
||||
// After a closing parenthesis/bracket + space, we should be in conjunction context
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: false,
|
||||
isInNegation: false,
|
||||
isInKey: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: true,
|
||||
isInParenthesis: false,
|
||||
...relationTokens,
|
||||
};
|
||||
}
|
||||
|
||||
if (currentToken.type === FilterQueryLexer.LBRACK) {
|
||||
// After an opening bracket + space, we should be in value context (for arrays)
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: true,
|
||||
isInNegation: false,
|
||||
isInKey: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add logic for context detection that works for both forward and backward navigation
|
||||
// This handles both cases: when user is typing forward and when they're moving backward
|
||||
if (previousToken && nextToken) {
|
||||
// Determine context based on token sequence pattern
|
||||
|
||||
// Key -> Operator -> Value -> Conjunction pattern detection
|
||||
if (isInKey && nextToken.type === FilterQueryLexer.EQUALS) {
|
||||
// When cursor is on a key and next token is an operator
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: false,
|
||||
isInKey: true,
|
||||
isInNegation: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (isInNegation && nextToken.type === FilterQueryLexer.NOT) {
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: false,
|
||||
isInKey: false,
|
||||
isInNegation: true,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
isInOperator &&
|
||||
previousToken.type === FilterQueryLexer.KEY &&
|
||||
(nextToken.type === FilterQueryLexer.QUOTED_TEXT ||
|
||||
nextToken.type === FilterQueryLexer.NUMBER ||
|
||||
nextToken.type === FilterQueryLexer.BOOL)
|
||||
) {
|
||||
// When cursor is on an operator between a key and value
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: false,
|
||||
isInKey: false,
|
||||
isInNegation: false,
|
||||
isInOperator: true,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
isInValue &&
|
||||
previousToken.type !== FilterQueryLexer.AND &&
|
||||
previousToken.type !== FilterQueryLexer.OR &&
|
||||
(nextToken.type === FilterQueryLexer.AND ||
|
||||
nextToken.type === FilterQueryLexer.OR)
|
||||
) {
|
||||
// When cursor is on a value and next token is a conjunction
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: true,
|
||||
isInKey: false,
|
||||
isInNegation: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
isInConjunction &&
|
||||
(previousToken.type === FilterQueryLexer.QUOTED_TEXT ||
|
||||
previousToken.type === FilterQueryLexer.NUMBER ||
|
||||
previousToken.type === FilterQueryLexer.BOOL) &&
|
||||
nextToken.type === FilterQueryLexer.KEY
|
||||
) {
|
||||
// When cursor is on a conjunction between a value and a key
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue: false,
|
||||
isInKey: false,
|
||||
isInNegation: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: true,
|
||||
isInParenthesis: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// If we're in between tokens (no exact token match), use next token type to determine context
|
||||
if (!exactToken && nextToken) {
|
||||
if (nextToken.type === FilterQueryLexer.KEY) {
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInValue: false,
|
||||
isInKey: true,
|
||||
isInNegation: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (nextToken.type === FilterQueryLexer.NOT) {
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInValue: false,
|
||||
isInKey: false,
|
||||
isInNegation: true,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
[
|
||||
FilterQueryLexer.EQUALS,
|
||||
FilterQueryLexer.NOT_EQUALS,
|
||||
FilterQueryLexer.GT,
|
||||
FilterQueryLexer.LT,
|
||||
FilterQueryLexer.GE,
|
||||
FilterQueryLexer.LE,
|
||||
].includes(nextToken.type)
|
||||
) {
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInValue: false,
|
||||
isInKey: false,
|
||||
isInNegation: false,
|
||||
isInOperator: true,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
[
|
||||
FilterQueryLexer.QUOTED_TEXT,
|
||||
FilterQueryLexer.NUMBER,
|
||||
FilterQueryLexer.BOOL,
|
||||
].includes(nextToken.type)
|
||||
) {
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInNegation: false,
|
||||
isInValue: true,
|
||||
isInKey: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
if ([FilterQueryLexer.AND, FilterQueryLexer.OR].includes(nextToken.type)) {
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInValue: false,
|
||||
isInKey: false,
|
||||
isInNegation: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: true,
|
||||
isInParenthesis: false,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
|
||||
// Add case for parentheses and brackets
|
||||
if (
|
||||
[
|
||||
FilterQueryLexer.LPAREN,
|
||||
FilterQueryLexer.RPAREN,
|
||||
FilterQueryLexer.LBRACK,
|
||||
FilterQueryLexer.RBRACK,
|
||||
].includes(nextToken.type)
|
||||
) {
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInValue: false,
|
||||
isInKey: false,
|
||||
isInNegation: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: true,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to default context detection based on current token
|
||||
return {
|
||||
tokenType: currentToken.type,
|
||||
text: currentToken.text,
|
||||
start: currentToken.start,
|
||||
stop: currentToken.stop,
|
||||
currentToken: currentToken.text,
|
||||
isInValue,
|
||||
isInKey,
|
||||
isInNegation,
|
||||
isInOperator,
|
||||
isInFunction,
|
||||
isInConjunction,
|
||||
isInParenthesis,
|
||||
...relationTokens, // Include related tokens
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error in getQueryContextAtCursor:', error);
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInValue: false,
|
||||
isInKey: false,
|
||||
isInNegation: false,
|
||||
isInOperator: false,
|
||||
isInFunction: false,
|
||||
isInConjunction: false,
|
||||
isInParenthesis: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
159
frontend/src/utils/antlrQueryUtils2.ts
Normal file
159
frontend/src/utils/antlrQueryUtils2.ts
Normal file
@ -0,0 +1,159 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
/* eslint-disable no-restricted-syntax */
|
||||
import antlr4, { CharStreams } from 'antlr4';
|
||||
import cloneDeep from 'lodash-es/cloneDeep';
|
||||
import FilterQueryLexer from 'parser/FilterQueryLexer';
|
||||
|
||||
export enum CursorContext {
|
||||
Key,
|
||||
Operator,
|
||||
Value,
|
||||
NoFilter,
|
||||
FullText,
|
||||
}
|
||||
|
||||
const contextNames = ['Key', 'Operator', 'Value', 'NoFilter', 'FullText'];
|
||||
|
||||
export function contextToString(context: CursorContext): string {
|
||||
return contextNames[context];
|
||||
}
|
||||
|
||||
// Result of cursor-context detection over a filter query.
export interface ContextInfo {
	// Which part of a filter expression the cursor is in.
	context: CursorContext;
	// Text of the most recently seen KEY token, when known.
	key?: string;
	// The token directly under the cursor, if any.
	token?: antlr4.Token;
	// Text of the operator preceding the cursor, when in value context.
	operator?: string;
}
|
||||
|
||||
export function detectContext(
|
||||
query: string,
|
||||
cursorOffset: number,
|
||||
): ContextInfo {
|
||||
console.log('query', query);
|
||||
console.log('cursorOffset', cursorOffset);
|
||||
|
||||
const chars = CharStreams.fromString(query);
|
||||
const lexer = new FilterQueryLexer(chars);
|
||||
const tokens = new antlr4.CommonTokenStream(lexer);
|
||||
tokens.fill();
|
||||
|
||||
enum State {
|
||||
ExpectKey,
|
||||
ExpectOperator,
|
||||
ExpectValue,
|
||||
}
|
||||
|
||||
let state = State.ExpectKey;
|
||||
let parens = 0;
|
||||
let array = 0;
|
||||
let lastKey: antlr4.Token | undefined;
|
||||
let lastOperator: antlr4.Token | undefined;
|
||||
let cursorTok: antlr4.Token | undefined;
|
||||
let pos = 0;
|
||||
|
||||
for (const tok of tokens.tokens) {
|
||||
const text = tok.text || '';
|
||||
|
||||
if (
|
||||
tok.channel === antlr4.Token.DEFAULT_CHANNEL &&
|
||||
pos <= cursorOffset &&
|
||||
cursorOffset <= pos + text.length
|
||||
) {
|
||||
cursorTok = tok;
|
||||
break;
|
||||
}
|
||||
|
||||
switch (tok.type) {
|
||||
case FilterQueryLexer.LPAREN:
|
||||
parens++;
|
||||
state = State.ExpectKey;
|
||||
break;
|
||||
case FilterQueryLexer.RPAREN:
|
||||
if (parens > 0) parens--;
|
||||
state = State.ExpectOperator;
|
||||
break;
|
||||
case FilterQueryLexer.LBRACK:
|
||||
array++;
|
||||
state = State.ExpectValue;
|
||||
break;
|
||||
case FilterQueryLexer.RBRACK:
|
||||
if (array > 0) array--;
|
||||
state = State.ExpectOperator;
|
||||
break;
|
||||
case FilterQueryLexer.COMMA:
|
||||
if (array > 0) state = State.ExpectValue;
|
||||
break;
|
||||
case FilterQueryLexer.KEY:
|
||||
if (state === State.ExpectKey) {
|
||||
lastKey = tok;
|
||||
state = State.ExpectOperator;
|
||||
}
|
||||
break;
|
||||
case FilterQueryLexer.QUOTED_TEXT:
|
||||
case FilterQueryLexer.NUMBER:
|
||||
case FilterQueryLexer.BOOL:
|
||||
if (state === State.ExpectValue) {
|
||||
state = State.ExpectOperator;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (
|
||||
tok.type >= FilterQueryLexer.EQUALS &&
|
||||
tok.type <= FilterQueryLexer.CONTAINS
|
||||
) {
|
||||
state = State.ExpectValue;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
pos += text.length;
|
||||
}
|
||||
|
||||
console.log('cursorTok', cursorTok);
|
||||
|
||||
const out: ContextInfo = { context: CursorContext.NoFilter };
|
||||
|
||||
if (cursorTok) {
|
||||
out.token = cursorTok;
|
||||
}
|
||||
|
||||
console.log('out', cloneDeep(out));
|
||||
console.log('state', cloneDeep(state));
|
||||
|
||||
switch (state) {
|
||||
case State.ExpectKey:
|
||||
out.context = CursorContext.Key;
|
||||
break;
|
||||
case State.ExpectOperator:
|
||||
out.context = CursorContext.Operator;
|
||||
if (lastKey) out.key = lastKey.text;
|
||||
break;
|
||||
case State.ExpectValue:
|
||||
out.context = CursorContext.Value;
|
||||
if (lastKey) out.key = lastKey.text;
|
||||
|
||||
if (lastOperator) {
|
||||
out.operator = lastOperator.text;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
out.context = CursorContext.NoFilter;
|
||||
break;
|
||||
}
|
||||
|
||||
console.log('out', cloneDeep(out));
|
||||
|
||||
if (
|
||||
cursorTok &&
|
||||
cursorTok.type === FilterQueryLexer.QUOTED_TEXT &&
|
||||
(out.context === CursorContext.Key || out.context === CursorContext.NoFilter)
|
||||
) {
|
||||
out.context = CursorContext.FullText;
|
||||
}
|
||||
|
||||
// if (!cursorTok || cursorTok.type === antlr4.Token.EOF) {
|
||||
// out.context = CursorContext.NoFilter;
|
||||
// }
|
||||
|
||||
return out;
|
||||
}
|
||||
45
frontend/src/utils/explorerUtils.ts
Normal file
45
frontend/src/utils/explorerUtils.ts
Normal file
@ -0,0 +1,45 @@
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { ExplorerViews } from 'pages/LogsExplorer/utils';
|
||||
|
||||
// Mapping between panel types and explorer views
|
||||
// Mapping between panel types and explorer views.
// Panel types without a dedicated explorer view (VALUE, BAR, PIE, HISTOGRAM)
// fall back to the time-series view; EMPTY_WIDGET falls back to the list view.
export const panelTypeToExplorerView: Record<PANEL_TYPES, ExplorerViews> = {
	[PANEL_TYPES.LIST]: ExplorerViews.LIST,
	[PANEL_TYPES.TIME_SERIES]: ExplorerViews.TIMESERIES,
	[PANEL_TYPES.TRACE]: ExplorerViews.TRACE,
	[PANEL_TYPES.TABLE]: ExplorerViews.TABLE,
	[PANEL_TYPES.VALUE]: ExplorerViews.TIMESERIES,
	[PANEL_TYPES.BAR]: ExplorerViews.TIMESERIES,
	[PANEL_TYPES.PIE]: ExplorerViews.TIMESERIES,
	[PANEL_TYPES.HISTOGRAM]: ExplorerViews.TIMESERIES,
	[PANEL_TYPES.EMPTY_WIDGET]: ExplorerViews.LIST,
};
|
||||
|
||||
/**
|
||||
* Get the explorer view based on panel type from URL or saved view
|
||||
* @param searchParams - URL search parameters
|
||||
* @param panelTypesFromUrl - Panel type extracted from URL
|
||||
* @returns The appropriate ExplorerViews value
|
||||
*/
|
||||
export const getExplorerViewFromUrl = (
|
||||
searchParams: URLSearchParams,
|
||||
panelTypesFromUrl: PANEL_TYPES | null,
|
||||
): ExplorerViews => {
|
||||
const savedView = searchParams.get(QueryParams.selectedExplorerView);
|
||||
if (savedView) {
|
||||
return savedView as ExplorerViews;
|
||||
}
|
||||
|
||||
// If no saved view, use panel type from URL to determine the view
|
||||
const urlPanelType = panelTypesFromUrl || PANEL_TYPES.LIST;
|
||||
return panelTypeToExplorerView[urlPanelType];
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the explorer view for a given panel type
|
||||
* @param panelType - The panel type
|
||||
* @returns The corresponding ExplorerViews value
|
||||
*/
|
||||
/**
 * Get the explorer view for a given panel type.
 * Thin lookup into `panelTypeToExplorerView`.
 *
 * @param panelType - The panel type
 * @returns The corresponding ExplorerViews value
 */
export const getExplorerViewForPanelType = (
	panelType: PANEL_TYPES,
): ExplorerViews => panelTypeToExplorerView[panelType];
|
||||
1553
frontend/src/utils/queryContextUtils.ts
Normal file
1553
frontend/src/utils/queryContextUtils.ts
Normal file
File diff suppressed because it is too large
Load Diff
93
frontend/src/utils/tokenUtils.ts
Normal file
93
frontend/src/utils/tokenUtils.ts
Normal file
@ -0,0 +1,93 @@
|
||||
import { NON_VALUE_OPERATORS } from 'constants/antlrQueryConstants';
|
||||
import FilterQueryLexer from 'parser/FilterQueryLexer';
|
||||
import { IQueryPair } from 'types/antlrQueryTypes';
|
||||
|
||||
// Helper function to check if a token is an attribute key
export function isKeyToken(tokenType: number): boolean {
	return tokenType === FilterQueryLexer.KEY;
}
|
||||
|
||||
// Helper function to check if a token is an operator
|
||||
export function isOperatorToken(tokenType: number): boolean {
|
||||
return [
|
||||
FilterQueryLexer.EQUALS,
|
||||
FilterQueryLexer.NOT_EQUALS,
|
||||
FilterQueryLexer.NEQ,
|
||||
FilterQueryLexer.LT,
|
||||
FilterQueryLexer.LE,
|
||||
FilterQueryLexer.GT,
|
||||
FilterQueryLexer.GE,
|
||||
FilterQueryLexer.LIKE,
|
||||
FilterQueryLexer.ILIKE,
|
||||
FilterQueryLexer.BETWEEN,
|
||||
FilterQueryLexer.EXISTS,
|
||||
FilterQueryLexer.REGEXP,
|
||||
FilterQueryLexer.CONTAINS,
|
||||
FilterQueryLexer.IN,
|
||||
FilterQueryLexer.NOT,
|
||||
].includes(tokenType);
|
||||
}
|
||||
|
||||
// Helper function to check if a token is an operator which doesn't require a value
|
||||
export function isNonValueOperatorToken(tokenType: number): boolean {
|
||||
return [FilterQueryLexer.EXISTS].includes(tokenType);
|
||||
}
|
||||
|
||||
// Helper function to check if a token is a value
|
||||
export function isValueToken(tokenType: number): boolean {
|
||||
return [
|
||||
FilterQueryLexer.QUOTED_TEXT,
|
||||
FilterQueryLexer.NUMBER,
|
||||
FilterQueryLexer.BOOL,
|
||||
FilterQueryLexer.KEY,
|
||||
].includes(tokenType);
|
||||
}
|
||||
|
||||
// Helper function to check if a token is a conjunction
|
||||
export function isConjunctionToken(tokenType: number): boolean {
|
||||
return [FilterQueryLexer.AND, FilterQueryLexer.OR].includes(tokenType);
|
||||
}
|
||||
|
||||
// Helper function to check if a token is a bracket
|
||||
export function isBracketToken(tokenType: number): boolean {
|
||||
return [
|
||||
FilterQueryLexer.LPAREN,
|
||||
FilterQueryLexer.RPAREN,
|
||||
FilterQueryLexer.LBRACK,
|
||||
FilterQueryLexer.RBRACK,
|
||||
].includes(tokenType);
|
||||
}
|
||||
|
||||
// Helper function to check if an operator typically uses bracket values (multi-value operators)
|
||||
export function isMultiValueOperator(operatorToken?: string): boolean {
|
||||
if (!operatorToken) return false;
|
||||
|
||||
const upperOp = operatorToken.toUpperCase();
|
||||
return upperOp === 'IN';
|
||||
}
|
||||
|
||||
export function isFunctionToken(tokenType: number): boolean {
|
||||
return [
|
||||
FilterQueryLexer.HAS,
|
||||
FilterQueryLexer.HASANY,
|
||||
FilterQueryLexer.HASALL,
|
||||
].includes(tokenType);
|
||||
}
|
||||
|
||||
export function isWrappedUnderQuotes(token: string): boolean {
|
||||
if (!token) return false;
|
||||
const sanitizedToken = token.trim();
|
||||
return (
|
||||
(sanitizedToken.startsWith('"') && sanitizedToken.endsWith('"')) ||
|
||||
(sanitizedToken.startsWith("'") && sanitizedToken.endsWith("'"))
|
||||
);
|
||||
}
|
||||
|
||||
export function isQueryPairComplete(queryPair: Partial<IQueryPair>): boolean {
|
||||
if (!queryPair) return false;
|
||||
// A complete query pair must have a key, an operator, and a value (or EXISTS operator)
|
||||
if (queryPair.operator && NON_VALUE_OPERATORS.includes(queryPair.operator)) {
|
||||
return !!queryPair.key && !!queryPair.operator;
|
||||
}
|
||||
// For other operators, we need a value as well
|
||||
return Boolean(queryPair.key && queryPair.operator && queryPair.value);
|
||||
}
|
||||
@ -31,7 +31,7 @@
|
||||
],
|
||||
"types": ["node", "jest"]
|
||||
},
|
||||
"exclude": ["node_modules"],
|
||||
"exclude": ["node_modules", "src/parser/*.ts"],
|
||||
"include": [
|
||||
"./src",
|
||||
"./src/**/*.ts",
|
||||
|
||||
@ -2170,10 +2170,52 @@
|
||||
resolved "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz"
|
||||
integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==
|
||||
|
||||
"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.14.5", "@babel/runtime@^7.14.6", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.16.7", "@babel/runtime@^7.17.2", "@babel/runtime@^7.17.8", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.19.0", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.7", "@babel/runtime@^7.21.0", "@babel/runtime@^7.22.5", "@babel/runtime@^7.23.2", "@babel/runtime@^7.3.1", "@babel/runtime@^7.4.2", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
|
||||
version "7.26.10"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.10.tgz#a07b4d8fa27af131a633d7b3524db803eb4764c2"
|
||||
integrity sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw==
|
||||
"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.14.5", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.16.7", "@babel/runtime@^7.17.2", "@babel/runtime@^7.17.8", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.19.0", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.7", "@babel/runtime@^7.4.2", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
|
||||
version "7.21.0"
|
||||
resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz"
|
||||
integrity sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.13.11"
|
||||
|
||||
"@babel/runtime@^7.13.10":
|
||||
version "7.23.6"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.6.tgz#c05e610dc228855dc92ef1b53d07389ed8ab521d"
|
||||
integrity sha512-zHd0eUrf5GZoOWVCXp6koAKQTfZV07eit6bGPmJgnZdnSAvvZee6zniW2XMF7Cmc4ISOOnPy3QaSiIJGJkVEDQ==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.14.6":
|
||||
version "7.22.15"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.15.tgz#38f46494ccf6cf020bd4eed7124b425e83e523b8"
|
||||
integrity sha512-T0O+aa+4w0u06iNmapipJXMV4HoUir03hpx3/YqXXhu9xim3w+dVphjFWl1OH8NbZHw5Lbm9k45drDkgq2VNNA==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.18.6":
|
||||
version "7.27.0"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.27.0.tgz#fbee7cf97c709518ecc1f590984481d5460d4762"
|
||||
integrity sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.21.0", "@babel/runtime@^7.22.5", "@babel/runtime@^7.23.2":
|
||||
version "7.23.2"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.2.tgz#062b0ac103261d68a966c4c7baf2ae3e62ec3885"
|
||||
integrity sha512-mM8eg4yl5D6i3lu2QKPuPH4FArvJ8KhTofbE7jwMUv9KX5mBvwPAqnV3MlyBNqdp9RyRKP6Yck8TrfYrPvX3bg==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.3.1":
|
||||
version "7.23.1"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.1.tgz#72741dc4d413338a91dcb044a86f3c0bc402646d"
|
||||
integrity sha512-hC2v6p8ZSI/W0HUzh3V8C5g+NwSKzKPtJwSpTjwl0o297GP9+ZLQSkdvHz46CM3LqyoXxq+5G9komY+eSqSO0g==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.7.6":
|
||||
version "7.26.0"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.0.tgz#8600c2f595f277c60815256418b85356a65173c1"
|
||||
integrity sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
@ -2317,6 +2359,95 @@
|
||||
resolved "https://registry.yarnpkg.com/@braintree/sanitize-url/-/sanitize-url-7.0.1.tgz#457233b0a18741b7711855044102b82bae7a070b"
|
||||
integrity sha512-URg8UM6lfC9ZYqFipItRSxYJdgpU5d2Z4KnjsJ+rj6tgAmGme7E+PQNCiud8g0HDaZKMovu2qjfa0f5Ge0Vlsg==
|
||||
|
||||
"@codemirror/autocomplete@6.18.6", "@codemirror/autocomplete@^6.0.0":
|
||||
version "6.18.6"
|
||||
resolved "https://registry.yarnpkg.com/@codemirror/autocomplete/-/autocomplete-6.18.6.tgz#de26e864a1ec8192a1b241eb86addbb612964ddb"
|
||||
integrity sha512-PHHBXFomUs5DF+9tCOM/UoW6XQ4R44lLNNhRaW9PKPTU0D7lIjRg3ElxaJnTwsl/oHiR93WSXDBrekhoUGCPtg==
|
||||
dependencies:
|
||||
"@codemirror/language" "^6.0.0"
|
||||
"@codemirror/state" "^6.0.0"
|
||||
"@codemirror/view" "^6.17.0"
|
||||
"@lezer/common" "^1.0.0"
|
||||
|
||||
"@codemirror/commands@^6.0.0", "@codemirror/commands@^6.1.0":
|
||||
version "6.8.1"
|
||||
resolved "https://registry.yarnpkg.com/@codemirror/commands/-/commands-6.8.1.tgz#639f5559d2f33f2582a2429c58cb0c1b925c7a30"
|
||||
integrity sha512-KlGVYufHMQzxbdQONiLyGQDUW0itrLZwq3CcY7xpv9ZLRHqzkBSoteocBHtMCoY7/Ci4xhzSrToIeLg7FxHuaw==
|
||||
dependencies:
|
||||
"@codemirror/language" "^6.0.0"
|
||||
"@codemirror/state" "^6.4.0"
|
||||
"@codemirror/view" "^6.27.0"
|
||||
"@lezer/common" "^1.1.0"
|
||||
|
||||
"@codemirror/lang-javascript@6.2.3":
|
||||
version "6.2.3"
|
||||
resolved "https://registry.yarnpkg.com/@codemirror/lang-javascript/-/lang-javascript-6.2.3.tgz#d705c359dc816afcd3bcdf120a559f83d31d4cda"
|
||||
integrity sha512-8PR3vIWg7pSu7ur8A07pGiYHgy3hHj+mRYRCSG8q+mPIrl0F02rgpGv+DsQTHRTc30rydOsf5PZ7yjKFg2Ackw==
|
||||
dependencies:
|
||||
"@codemirror/autocomplete" "^6.0.0"
|
||||
"@codemirror/language" "^6.6.0"
|
||||
"@codemirror/lint" "^6.0.0"
|
||||
"@codemirror/state" "^6.0.0"
|
||||
"@codemirror/view" "^6.17.0"
|
||||
"@lezer/common" "^1.0.0"
|
||||
"@lezer/javascript" "^1.0.0"
|
||||
|
||||
"@codemirror/language@^6.0.0", "@codemirror/language@^6.6.0":
|
||||
version "6.11.0"
|
||||
resolved "https://registry.yarnpkg.com/@codemirror/language/-/language-6.11.0.tgz#5ae90972601497f4575f30811519d720bf7232c9"
|
||||
integrity sha512-A7+f++LodNNc1wGgoRDTt78cOwWm9KVezApgjOMp1W4hM0898nsqBXwF+sbePE7ZRcjN7Sa1Z5m2oN27XkmEjQ==
|
||||
dependencies:
|
||||
"@codemirror/state" "^6.0.0"
|
||||
"@codemirror/view" "^6.23.0"
|
||||
"@lezer/common" "^1.1.0"
|
||||
"@lezer/highlight" "^1.0.0"
|
||||
"@lezer/lr" "^1.0.0"
|
||||
style-mod "^4.0.0"
|
||||
|
||||
"@codemirror/lint@^6.0.0":
|
||||
version "6.8.5"
|
||||
resolved "https://registry.yarnpkg.com/@codemirror/lint/-/lint-6.8.5.tgz#9edaa808e764e28e07665b015951934c8ec3a418"
|
||||
integrity sha512-s3n3KisH7dx3vsoeGMxsbRAgKe4O1vbrnKBClm99PU0fWxmxsx5rR2PfqQgIt+2MMJBHbiJ5rfIdLYfB9NNvsA==
|
||||
dependencies:
|
||||
"@codemirror/state" "^6.0.0"
|
||||
"@codemirror/view" "^6.35.0"
|
||||
crelt "^1.0.5"
|
||||
|
||||
"@codemirror/search@^6.0.0":
|
||||
version "6.5.10"
|
||||
resolved "https://registry.yarnpkg.com/@codemirror/search/-/search-6.5.10.tgz#7367bfc88094d078b91c752bc74140fb565b55ee"
|
||||
integrity sha512-RMdPdmsrUf53pb2VwflKGHEe1XVM07hI7vV2ntgw1dmqhimpatSJKva4VA9h4TLUDOD4EIF02201oZurpnEFsg==
|
||||
dependencies:
|
||||
"@codemirror/state" "^6.0.0"
|
||||
"@codemirror/view" "^6.0.0"
|
||||
crelt "^1.0.5"
|
||||
|
||||
"@codemirror/state@^6.0.0", "@codemirror/state@^6.1.1", "@codemirror/state@^6.4.0", "@codemirror/state@^6.5.0":
|
||||
version "6.5.2"
|
||||
resolved "https://registry.yarnpkg.com/@codemirror/state/-/state-6.5.2.tgz#8eca3a64212a83367dc85475b7d78d5c9b7076c6"
|
||||
integrity sha512-FVqsPqtPWKVVL3dPSxy8wEF/ymIEuVzF1PK3VbUgrxXpJUSHQWWZz4JMToquRxnkw+36LTamCZG2iua2Ptq0fA==
|
||||
dependencies:
|
||||
"@marijn/find-cluster-break" "^1.0.0"
|
||||
|
||||
"@codemirror/theme-one-dark@^6.0.0":
|
||||
version "6.1.2"
|
||||
resolved "https://registry.yarnpkg.com/@codemirror/theme-one-dark/-/theme-one-dark-6.1.2.tgz#fcef9f9cfc17a07836cb7da17c9f6d7231064df8"
|
||||
integrity sha512-F+sH0X16j/qFLMAfbciKTxVOwkdAS336b7AXTKOZhy8BR3eH/RelsnLgLFINrpST63mmN2OuwUt0W2ndUgYwUA==
|
||||
dependencies:
|
||||
"@codemirror/language" "^6.0.0"
|
||||
"@codemirror/state" "^6.0.0"
|
||||
"@codemirror/view" "^6.0.0"
|
||||
"@lezer/highlight" "^1.0.0"
|
||||
|
||||
"@codemirror/view@^6.0.0", "@codemirror/view@^6.17.0", "@codemirror/view@^6.23.0", "@codemirror/view@^6.27.0", "@codemirror/view@^6.35.0":
|
||||
version "6.36.6"
|
||||
resolved "https://registry.yarnpkg.com/@codemirror/view/-/view-6.36.6.tgz#735a6431caed0c2c7d26c645066b02f10e802812"
|
||||
integrity sha512-uxugGLet+Nzp0Jcit8Hn3LypM8ioMLKTsdf8FRoT3HWvZtb9GhaWMe0Cc15rz90Ljab4YFJiAulmIVB74OY0IQ==
|
||||
dependencies:
|
||||
"@codemirror/state" "^6.5.0"
|
||||
style-mod "^4.1.0"
|
||||
w3c-keyname "^2.2.4"
|
||||
|
||||
"@commitlint/cli@^16.3.0":
|
||||
version "16.3.0"
|
||||
resolved "https://registry.yarnpkg.com/@commitlint/cli/-/cli-16.3.0.tgz#5689f5c2abbb7880d5ff13329251e5648a784b16"
|
||||
@ -3031,6 +3162,34 @@
|
||||
resolved "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz"
|
||||
integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==
|
||||
|
||||
"@lezer/common@^1.0.0", "@lezer/common@^1.1.0", "@lezer/common@^1.2.0":
|
||||
version "1.2.3"
|
||||
resolved "https://registry.yarnpkg.com/@lezer/common/-/common-1.2.3.tgz#138fcddab157d83da557554851017c6c1e5667fd"
|
||||
integrity sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==
|
||||
|
||||
"@lezer/highlight@^1.0.0", "@lezer/highlight@^1.1.3":
|
||||
version "1.2.1"
|
||||
resolved "https://registry.yarnpkg.com/@lezer/highlight/-/highlight-1.2.1.tgz#596fa8f9aeb58a608be0a563e960c373cbf23f8b"
|
||||
integrity sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA==
|
||||
dependencies:
|
||||
"@lezer/common" "^1.0.0"
|
||||
|
||||
"@lezer/javascript@^1.0.0":
|
||||
version "1.5.1"
|
||||
resolved "https://registry.yarnpkg.com/@lezer/javascript/-/javascript-1.5.1.tgz#2a424a6ec29f1d4ef3c34cbccc5447e373618ad8"
|
||||
integrity sha512-ATOImjeVJuvgm3JQ/bpo2Tmv55HSScE2MTPnKRMRIPx2cLhHGyX2VnqpHhtIV1tVzIjZDbcWQm+NCTF40ggZVw==
|
||||
dependencies:
|
||||
"@lezer/common" "^1.2.0"
|
||||
"@lezer/highlight" "^1.1.3"
|
||||
"@lezer/lr" "^1.3.0"
|
||||
|
||||
"@lezer/lr@^1.0.0", "@lezer/lr@^1.3.0":
|
||||
version "1.4.2"
|
||||
resolved "https://registry.yarnpkg.com/@lezer/lr/-/lr-1.4.2.tgz#931ea3dea8e9de84e90781001dae30dea9ff1727"
|
||||
integrity sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA==
|
||||
dependencies:
|
||||
"@lezer/common" "^1.0.0"
|
||||
|
||||
"@mapbox/jsonlint-lines-primitives@~2.0.2":
|
||||
version "2.0.2"
|
||||
resolved "https://registry.npmjs.org/@mapbox/jsonlint-lines-primitives/-/jsonlint-lines-primitives-2.0.2.tgz"
|
||||
@ -3060,6 +3219,11 @@
|
||||
resolved "https://registry.npmjs.org/@mapbox/unitbezier/-/unitbezier-0.0.0.tgz"
|
||||
integrity sha512-HPnRdYO0WjFjRTSwO3frz1wKaU649OBFPX3Zo/2WZvuRi6zMiRGui8SnPQiQABgqCf8YikDe5t3HViTVw1WUzA==
|
||||
|
||||
"@marijn/find-cluster-break@^1.0.0":
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz#775374306116d51c0c500b8c4face0f9a04752d8"
|
||||
integrity sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==
|
||||
|
||||
"@mdx-js/loader@2.3.0":
|
||||
version "2.3.0"
|
||||
resolved "https://registry.yarnpkg.com/@mdx-js/loader/-/loader-2.3.0.tgz#56a6b07eb0027b6407e953a97c52bd8619601161"
|
||||
@ -4699,11 +4863,52 @@
|
||||
"@typescript-eslint/types" "5.59.1"
|
||||
eslint-visitor-keys "^3.3.0"
|
||||
|
||||
"@uiw/codemirror-extensions-basic-setup@4.23.10":
|
||||
version "4.23.10"
|
||||
resolved "https://registry.yarnpkg.com/@uiw/codemirror-extensions-basic-setup/-/codemirror-extensions-basic-setup-4.23.10.tgz#e5d901e860a039ac61d955af26a12866e9dc356c"
|
||||
integrity sha512-zpbmSeNs3OU/f/Eyd6brFnjsBUYwv2mFjWxlAsIRSwTlW+skIT60rQHFBSfsj/5UVSxSLWVeUYczN7AyXvgTGQ==
|
||||
dependencies:
|
||||
"@codemirror/autocomplete" "^6.0.0"
|
||||
"@codemirror/commands" "^6.0.0"
|
||||
"@codemirror/language" "^6.0.0"
|
||||
"@codemirror/lint" "^6.0.0"
|
||||
"@codemirror/search" "^6.0.0"
|
||||
"@codemirror/state" "^6.0.0"
|
||||
"@codemirror/view" "^6.0.0"
|
||||
|
||||
"@uiw/codemirror-theme-copilot@4.23.11":
|
||||
version "4.23.11"
|
||||
resolved "https://registry.yarnpkg.com/@uiw/codemirror-theme-copilot/-/codemirror-theme-copilot-4.23.11.tgz#075a2a6449c62835af2cb7fdef4fe9558bf20f30"
|
||||
integrity sha512-m6vvsWHbji0s25ly3L35BdjSMys4DL3dzb4wbVtYa7m79heA3h2YNiWFIkOjbyPtoOh4RUGB6tBT8z0J/5PmTA==
|
||||
dependencies:
|
||||
"@uiw/codemirror-themes" "4.23.11"
|
||||
|
||||
"@uiw/codemirror-themes@4.23.11":
|
||||
version "4.23.11"
|
||||
resolved "https://registry.yarnpkg.com/@uiw/codemirror-themes/-/codemirror-themes-4.23.11.tgz#abd022b9d65c851d72ecbc93169bb5143b9c35b7"
|
||||
integrity sha512-90joUOau/3E6KNdA5ePr/t8LVBA/426wIsOuwaZohsDM5a5gsYfdMWGYfClnLMkpfHJUDYYMO+b2JPhJf9mzHw==
|
||||
dependencies:
|
||||
"@codemirror/language" "^6.0.0"
|
||||
"@codemirror/state" "^6.0.0"
|
||||
"@codemirror/view" "^6.0.0"
|
||||
|
||||
"@uiw/copy-to-clipboard@~1.0.12":
|
||||
version "1.0.15"
|
||||
resolved "https://registry.yarnpkg.com/@uiw/copy-to-clipboard/-/copy-to-clipboard-1.0.15.tgz#959cebbae64df353964647bb5b9d705176b2e613"
|
||||
integrity sha512-1bbGZ3T+SGmA07BoVPK4UCUDcowDN/moctviJGQexfOc9qL8TMLDQPr7mTPvDKhgJkgnlKkAQNFU8PiarIi9sQ==
|
||||
|
||||
"@uiw/react-codemirror@4.23.10":
|
||||
version "4.23.10"
|
||||
resolved "https://registry.yarnpkg.com/@uiw/react-codemirror/-/react-codemirror-4.23.10.tgz#2e34aec4f65f901ed8e9b8a22e28f2177addce69"
|
||||
integrity sha512-AbN4eVHOL4ckRuIXpZxkzEqL/1ChVA+BSdEnAKjIB68pLQvKsVoYbiFP8zkXkYc4+Fcgq5KbAjvYqdo4ewemKw==
|
||||
dependencies:
|
||||
"@babel/runtime" "^7.18.6"
|
||||
"@codemirror/commands" "^6.1.0"
|
||||
"@codemirror/state" "^6.1.1"
|
||||
"@codemirror/theme-one-dark" "^6.0.0"
|
||||
"@uiw/codemirror-extensions-basic-setup" "4.23.10"
|
||||
codemirror "^6.0.0"
|
||||
|
||||
"@uiw/react-markdown-preview@^4.1.14":
|
||||
version "4.1.15"
|
||||
resolved "https://registry.yarnpkg.com/@uiw/react-markdown-preview/-/react-markdown-preview-4.1.15.tgz#82f7ca4d7dc0e9896856fd795b5aa063d1209d2a"
|
||||
@ -5329,6 +5534,11 @@ antd@5.11.0:
|
||||
scroll-into-view-if-needed "^3.1.0"
|
||||
throttle-debounce "^5.0.0"
|
||||
|
||||
antlr4@4.13.2:
|
||||
version "4.13.2"
|
||||
resolved "https://registry.yarnpkg.com/antlr4/-/antlr4-4.13.2.tgz#0d084ad0e32620482a9c3a0e2470c02e72e4006d"
|
||||
integrity sha512-QiVbZhyy4xAZ17UPEuG3YTOt8ZaoeOR1CvEAqrEsDBsOqINslaB147i9xqljZqoyf5S+EUlGStaj+t22LT9MOg==
|
||||
|
||||
anymatch@^3.0.3, anymatch@~3.1.2:
|
||||
version "3.1.3"
|
||||
resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz"
|
||||
@ -6607,6 +6817,19 @@ co@^4.6.0:
|
||||
resolved "https://registry.npmjs.org/co/-/co-4.6.0.tgz"
|
||||
integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==
|
||||
|
||||
codemirror@^6.0.0:
|
||||
version "6.0.1"
|
||||
resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-6.0.1.tgz#62b91142d45904547ee3e0e0e4c1a79158035a29"
|
||||
integrity sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==
|
||||
dependencies:
|
||||
"@codemirror/autocomplete" "^6.0.0"
|
||||
"@codemirror/commands" "^6.0.0"
|
||||
"@codemirror/language" "^6.0.0"
|
||||
"@codemirror/lint" "^6.0.0"
|
||||
"@codemirror/search" "^6.0.0"
|
||||
"@codemirror/state" "^6.0.0"
|
||||
"@codemirror/view" "^6.0.0"
|
||||
|
||||
collect-v8-coverage@^1.0.0:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz"
|
||||
@ -6940,6 +7163,11 @@ create-require@^1.1.0:
|
||||
resolved "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz"
|
||||
integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==
|
||||
|
||||
crelt@^1.0.5:
|
||||
version "1.0.6"
|
||||
resolved "https://registry.yarnpkg.com/crelt/-/crelt-1.0.6.tgz#7cc898ea74e190fb6ef9dae57f8f81cf7302df72"
|
||||
integrity sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==
|
||||
|
||||
cross-env@^7.0.3:
|
||||
version "7.0.3"
|
||||
resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-7.0.3.tgz#865264b29677dc015ba8418918965dd232fc54cf"
|
||||
@ -15284,6 +15512,11 @@ regenerator-runtime@^0.11.0:
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9"
|
||||
integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==
|
||||
|
||||
regenerator-runtime@^0.13.11:
|
||||
version "0.13.11"
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9"
|
||||
integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==
|
||||
|
||||
regenerator-runtime@^0.14.0:
|
||||
version "0.14.0"
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz#5e19d68eb12d486f797e15a3c6a918f7cec5eb45"
|
||||
@ -16491,6 +16724,11 @@ style-loader@1.3.0:
|
||||
loader-utils "^2.0.0"
|
||||
schema-utils "^2.7.0"
|
||||
|
||||
style-mod@^4.0.0, style-mod@^4.1.0:
|
||||
version "4.1.2"
|
||||
resolved "https://registry.yarnpkg.com/style-mod/-/style-mod-4.1.2.tgz#ca238a1ad4786520f7515a8539d5a63691d7bf67"
|
||||
integrity sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==
|
||||
|
||||
style-to-object@^0.4.0, style-to-object@^0.4.1:
|
||||
version "0.4.2"
|
||||
resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-0.4.2.tgz#a8247057111dea8bd3b8a1a66d2d0c9cf9218a54"
|
||||
@ -17636,6 +17874,11 @@ w3c-hr-time@^1.0.2:
|
||||
dependencies:
|
||||
browser-process-hrtime "^1.0.0"
|
||||
|
||||
w3c-keyname@^2.2.4:
|
||||
version "2.2.8"
|
||||
resolved "https://registry.yarnpkg.com/w3c-keyname/-/w3c-keyname-2.2.8.tgz#7b17c8c6883d4e8b86ac8aba79d39e880f8869c5"
|
||||
integrity sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==
|
||||
|
||||
w3c-xmlserializer@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz"
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user