File: 6587.bfb1ca0c32c6f325b0df.js.map
\n \n \n )}\n >\n );\n};\n","import { from, Observable } from 'rxjs';\nimport { map } from 'rxjs/operators';\n\nimport { CustomVariableSupport, DataQueryRequest, MetricFindValue, ScopedVars, TimeRange } from '@grafana/data';\n\nimport { LokiVariableQueryEditor } from './components/VariableQueryEditor';\nimport { LokiDatasource } from './datasource';\nimport { LokiVariableQuery } from './types';\n\nexport class LokiVariableSupport extends CustomVariableSupport
{\n editor = LokiVariableQueryEditor;\n\n constructor(private datasource: LokiDatasource) {\n super();\n }\n\n async execute(query: LokiVariableQuery, scopedVars: ScopedVars, range: TimeRange) {\n return this.datasource.metricFindQuery(query, { scopedVars, range });\n }\n\n query(request: DataQueryRequest): Observable<{ data: MetricFindValue[] }> {\n const result = this.execute(request.targets[0], request.scopedVars, request.range);\n\n return from(result).pipe(map((data) => ({ data })));\n }\n}\n","import { groupBy } from 'lodash';\n\nimport { FieldType, DataFrame, DataLink, Field } from '@grafana/data';\nimport { getDataSourceSrv } from '@grafana/runtime';\n\nimport { DerivedFieldConfig } from './types';\n\nexport function getDerivedFields(dataFrame: DataFrame, derivedFieldConfigs: DerivedFieldConfig[]): Field[] {\n if (!derivedFieldConfigs.length) {\n return [];\n }\n const derivedFieldsGrouped = groupBy(derivedFieldConfigs, 'name');\n\n const newFields = Object.values(derivedFieldsGrouped).map(fieldFromDerivedFieldConfig);\n\n // line-field is the first string-field\n // NOTE: we should create some common log-frame-extra-string-field code somewhere\n const lineField = dataFrame.fields.find((f) => f.type === FieldType.string);\n\n if (lineField === undefined) {\n // if this is happening, something went wrong, let's raise an error\n throw new Error('invalid logs-dataframe, string-field missing');\n }\n\n const labelFields = dataFrame.fields.find((f) => f.type === FieldType.other && f.name === 'labels');\n\n for (let i = 0; i < lineField.values.length; i++) {\n for (const field of newFields) {\n // `matcherRegex` can be either a RegExp that is used to extract the value from the log line, or it can be a label key to derive the field from the labels\n if (derivedFieldsGrouped[field.name][0].matcherType === 'label' && labelFields) {\n const label = labelFields.values[i];\n if (label) {\n // Find the key that matches both, the `matcherRegex` and the label key\n const intersectingKey = Object.keys(label).find(\n (key) => derivedFieldsGrouped[field.name][0].matcherRegex === key\n );\n\n if (intersectingKey) {\n field.values.push(label[intersectingKey]);\n continue;\n }\n }\n field.values.push(null);\n } else if (\n derivedFieldsGrouped[field.name][0].matcherType === 'regex' ||\n derivedFieldsGrouped[field.name][0].matcherType === undefined\n ) {\n // `matcherRegex` will actually be used as a RegExp here\n const line = lineField.values[i];\n const logMatch = line.match(derivedFieldsGrouped[field.name][0].matcherRegex);\n\n if (logMatch && logMatch[1]) {\n field.values.push(logMatch[1]);\n continue;\n }\n\n field.values.push(null);\n } else {\n field.values.push(null);\n }\n }\n }\n\n return newFields;\n}\n\n/**\n * Transform derivedField config into dataframe field with config that contains link.\n */\nfunction fieldFromDerivedFieldConfig(derivedFieldConfigs: DerivedFieldConfig[]): Field {\n const dataSourceSrv = getDataSourceSrv();\n\n const dataLinks = derivedFieldConfigs.reduce((acc, derivedFieldConfig) => {\n // Having field.datasourceUid means it is an internal link.\n if (derivedFieldConfig.datasourceUid) {\n const dsSettings = dataSourceSrv.getInstanceSettings(derivedFieldConfig.datasourceUid);\n const queryType = (type: string | undefined): string | undefined => {\n switch (type) {\n case 'tempo':\n return 'traceql';\n case 'grafana-x-ray-datasource':\n return 'getTrace';\n default:\n return undefined;\n }\n };\n\n acc.push({\n // Will be filled out later\n title: 
derivedFieldConfig.urlDisplayLabel || '',\n url: '',\n // This is hardcoded for Jaeger or Zipkin not way right now to specify datasource specific query object\n internal: {\n query: { query: derivedFieldConfig.url, queryType: queryType(dsSettings?.type) },\n datasourceUid: derivedFieldConfig.datasourceUid,\n datasourceName: dsSettings?.name ?? 'Data source not found',\n },\n });\n } else if (derivedFieldConfig.url) {\n acc.push({\n // We do not know what title to give here so we count on presentation layer to create a title from metadata.\n title: derivedFieldConfig.urlDisplayLabel || '',\n // This is hardcoded for Jaeger or Zipkin not way right now to specify datasource specific query object\n url: derivedFieldConfig.url,\n });\n }\n return acc;\n }, []);\n\n return {\n name: derivedFieldConfigs[0].name,\n type: FieldType.string,\n config: {\n links: dataLinks,\n },\n // We are adding values later on\n values: [],\n };\n}\n","import { groupBy } from 'lodash';\n\nimport { DataFrame, Field, FieldType } from '@grafana/data';\n\nexport function makeTableFrames(instantMetricFrames: DataFrame[]): DataFrame[] {\n // first we remove frames that have no refId\n // (we will group them by refId, so we need it to be set)\n const framesWithRefId = instantMetricFrames.filter((f) => f.refId !== undefined);\n\n const framesByRefId = groupBy(framesWithRefId, (frame) => frame.refId);\n\n return Object.entries(framesByRefId).map(([refId, frames]) => makeTableFrame(frames, refId));\n}\n\ntype NumberField = Field;\ntype StringField = Field;\n\nfunction makeTableFrame(instantMetricFrames: DataFrame[], refId: string): DataFrame {\n const tableTimeField: NumberField = { name: 'Time', config: {}, values: [], type: FieldType.time };\n const tableValueField: NumberField = {\n name: `Value #${refId}`,\n config: {},\n values: [],\n type: FieldType.number,\n };\n\n // Sort metric labels, create columns for them and record their index\n const allLabelNames = new Set(\n instantMetricFrames.map((frame) => frame.fields.map((field) => Object.keys(field.labels ?? {})).flat()).flat()\n );\n\n const sortedLabelNames = Array.from(allLabelNames).sort();\n\n const labelFields: StringField[] = sortedLabelNames.map((labelName) => ({\n name: labelName,\n config: { filterable: true },\n values: [],\n type: FieldType.string,\n }));\n\n instantMetricFrames.forEach((frame) => {\n const timeField = frame.fields.find((field) => field.type === FieldType.time);\n const valueField = frame.fields.find((field) => field.type === FieldType.number);\n if (timeField == null || valueField == null) {\n return;\n }\n\n const timeArray = timeField.values;\n const valueArray = valueField.values;\n\n for (let x of timeArray) {\n tableTimeField.values.push(x);\n }\n\n for (let x of valueArray) {\n tableValueField.values.push(x);\n }\n\n const labels = valueField.labels ?? {};\n\n for (let f of labelFields) {\n const text = labels[f.name] ?? 
'';\n // we insert the labels as many times as we have values\n for (let i = 0; i < valueArray.length; i++) {\n f.values.push(text);\n }\n }\n });\n\n return {\n fields: [tableTimeField, ...labelFields, tableValueField],\n refId,\n meta: { preferredVisualisationType: 'table' },\n length: tableTimeField.values.length,\n };\n}\n","import { DataQueryResponse, DataFrame, isDataFrame, FieldType, QueryResultMeta, DataQueryError } from '@grafana/data';\n\nimport { getDerivedFields } from './getDerivedFields';\nimport { makeTableFrames } from './makeTableFrames';\nimport { getHighlighterExpressionsFromQuery } from './queryUtils';\nimport { dataFrameHasLokiError } from './responseUtils';\nimport { DerivedFieldConfig, LokiQuery, LokiQueryType } from './types';\n\nfunction isMetricFrame(frame: DataFrame): boolean {\n return frame.fields.every((field) => field.type === FieldType.time || field.type === FieldType.number);\n}\n\n// returns a new frame, with meta shallow merged with its original meta\nfunction setFrameMeta(frame: DataFrame, meta: QueryResultMeta): DataFrame {\n const { meta: oldMeta, ...rest } = frame;\n // meta maybe be undefined, we need to handle that\n const newMeta = { ...oldMeta, ...meta };\n return {\n ...rest,\n meta: newMeta,\n };\n}\n\nfunction processStreamFrame(\n frame: DataFrame,\n query: LokiQuery | undefined,\n derivedFieldConfigs: DerivedFieldConfig[]\n): DataFrame {\n const custom: Record = {\n ...frame.meta?.custom, // keep the original meta.custom\n // used by logsModel\n lokiQueryStatKey: 'Summary: total bytes processed',\n };\n\n if (dataFrameHasLokiError(frame)) {\n custom.error = 'Error when parsing some of the logs';\n }\n\n const meta: QueryResultMeta = {\n preferredVisualisationType: 'logs',\n limit: query?.maxLines,\n searchWords: query !== undefined ? getHighlighterExpressionsFromQuery(query.expr) : undefined,\n custom,\n };\n\n const newFrame = setFrameMeta(frame, meta);\n const derivedFields = getDerivedFields(newFrame, derivedFieldConfigs);\n return {\n ...newFrame,\n fields: [...newFrame.fields, ...derivedFields],\n };\n}\n\nfunction processStreamsFrames(\n frames: DataFrame[],\n queryMap: Map,\n derivedFieldConfigs: DerivedFieldConfig[]\n): DataFrame[] {\n return frames.map((frame) => {\n const query = frame.refId !== undefined ? queryMap.get(frame.refId) : undefined;\n return processStreamFrame(frame, query, derivedFieldConfigs);\n });\n}\n\nfunction processMetricInstantFrames(frames: DataFrame[]): DataFrame[] {\n return frames.length > 0 ? 
makeTableFrames(frames) : [];\n}\n\nfunction processMetricRangeFrames(frames: DataFrame[]): DataFrame[] {\n const meta: QueryResultMeta = { preferredVisualisationType: 'graph' };\n return frames.map((frame) => setFrameMeta(frame, meta));\n}\n\n// we split the frames into 3 groups, because we will handle\n// each group slightly differently\nfunction groupFrames(\n frames: DataFrame[],\n queryMap: Map\n): {\n streamsFrames: DataFrame[];\n metricInstantFrames: DataFrame[];\n metricRangeFrames: DataFrame[];\n} {\n const streamsFrames: DataFrame[] = [];\n const metricInstantFrames: DataFrame[] = [];\n const metricRangeFrames: DataFrame[] = [];\n\n frames.forEach((frame) => {\n if (!isMetricFrame(frame)) {\n streamsFrames.push(frame);\n } else {\n const isInstantFrame = frame.refId != null && queryMap.get(frame.refId)?.queryType === LokiQueryType.Instant;\n if (isInstantFrame) {\n metricInstantFrames.push(frame);\n } else {\n metricRangeFrames.push(frame);\n }\n }\n });\n\n return { streamsFrames, metricInstantFrames, metricRangeFrames };\n}\n\nfunction improveError(error: DataQueryError | undefined, queryMap: Map): DataQueryError | undefined {\n // many things are optional in an error-object, we need an error-message to exist,\n // and we need to find the loki-query, based on the refId in the error-object.\n if (error === undefined) {\n return error;\n }\n\n const { refId, message } = error;\n if (refId === undefined || message === undefined) {\n return error;\n }\n\n const query = queryMap.get(refId);\n if (query === undefined) {\n return error;\n }\n\n if (message.includes('escape') && query.expr.includes('\\\\')) {\n return {\n ...error,\n message: `${message}. Make sure that all special characters are escaped with \\\\. For more information on escaping of special characters visit LogQL documentation at https://grafana.com/docs/loki/latest/logql/.`,\n };\n }\n\n return error;\n}\n\nexport function transformBackendResult(\n response: DataQueryResponse,\n queries: LokiQuery[],\n derivedFieldConfigs: DerivedFieldConfig[]\n): DataQueryResponse {\n const { data, error, ...rest } = response;\n\n // in the typescript type, data is an array of basically anything.\n // we do know that they have to be dataframes, so we make a quick check,\n // this way we can be sure, and also typescript is happy.\n const dataFrames = data.map((d) => {\n if (!isDataFrame(d)) {\n throw new Error('transformation only supports dataframe responses');\n }\n return d;\n });\n\n const queryMap = new Map(queries.map((query) => [query.refId, query]));\n\n const { streamsFrames, metricInstantFrames, metricRangeFrames } = groupFrames(dataFrames, queryMap);\n\n return {\n ...rest,\n error: improveError(error, queryMap),\n data: [\n ...processMetricRangeFrames(metricRangeFrames),\n ...processMetricInstantFrames(metricInstantFrames),\n ...processStreamsFrames(streamsFrames, queryMap, derivedFieldConfigs),\n ],\n };\n}\n","// Libraries\nimport React, { memo } from 'react';\n\nimport { AnnotationQuery } from '@grafana/data';\nimport { EditorField, EditorRow } from '@grafana/experimental';\nimport { Input } from '@grafana/ui';\n\n// Types\nimport { getNormalizedLokiQuery } from '../queryUtils';\nimport { LokiQuery, LokiQueryType } from '../types';\n\nimport { LokiOptionFields } from './LokiOptionFields';\nimport { LokiQueryField } from './LokiQueryField';\nimport { LokiQueryEditorProps } from './types';\n\ntype Props = LokiQueryEditorProps & {\n annotation?: AnnotationQuery;\n onAnnotationChange?: (annotation: AnnotationQuery) => 
void;\n};\n\nexport const LokiAnnotationsQueryEditor = memo(function LokiAnnotationQueryEditor(props: Props) {\n const { annotation, onAnnotationChange, history } = props;\n\n // this should never happen, but we want to keep typescript happy\n if (annotation === undefined || onAnnotationChange === undefined) {\n return null;\n }\n\n const onChangeQuery = (query: LokiQuery) => {\n // the current version of annotations only stores an optional boolean\n // field `instant` to handle the instant/range switch.\n // we need to maintain compatibility for now, so we do the same.\n // we explicitly call `getNormalizedLokiQuery` to make sure `queryType`\n // is set up correctly.\n const instant = getNormalizedLokiQuery(query).queryType === LokiQueryType.Instant;\n onAnnotationChange({\n ...annotation,\n expr: query.expr,\n maxLines: query.maxLines,\n instant,\n });\n };\n\n const queryWithRefId: LokiQuery = {\n refId: '',\n expr: annotation.expr,\n maxLines: annotation.maxLines,\n instant: annotation.instant,\n queryType: annotation.queryType,\n };\n return (\n <>\n \n {}}\n history={history}\n ExtraFieldElement={\n {}}\n onChange={onChangeQuery}\n />\n }\n />\n
\n\n \n \n {\n onAnnotationChange({\n ...annotation,\n titleFormat: event.currentTarget.value,\n });\n }}\n />\n \n \n {\n onAnnotationChange({\n ...annotation,\n tagKeys: event.currentTarget.value,\n });\n }}\n />\n \n \n {\n onAnnotationChange({\n ...annotation,\n textFormat: event.currentTarget.value,\n });\n }}\n />\n \n \n >\n );\n});\n","import { DataFrame, QueryHint } from '@grafana/data';\n\nimport {\n isQueryWithLabelFilter,\n isQueryPipelineErrorFiltering,\n isQueryWithLabelFormat,\n isQueryWithParser,\n isQueryWithLineFilter,\n} from './queryUtils';\nimport {\n dataFrameHasLevelLabel,\n extractHasErrorLabelFromDataFrame,\n extractLevelLikeLabelFromDataFrame,\n extractLogParserFromDataFrame,\n} from './responseUtils';\n\nexport function getQueryHints(query: string, series: DataFrame[]): QueryHint[] {\n if (series.length === 0) {\n return [];\n }\n\n const hints: QueryHint[] = [];\n const { queryWithParser, parserCount } = isQueryWithParser(query);\n\n if (!queryWithParser) {\n const { hasLogfmt, hasJSON, hasPack } = extractLogParserFromDataFrame(series[0]);\n if (hasJSON) {\n if (hasPack) {\n hints.push({\n type: 'ADD_UNPACK_PARSER',\n label: 'Selected log stream selector has packed logs.',\n fix: {\n title: 'add unpack parser',\n label: 'Consider using unpack parser.',\n action: {\n type: 'ADD_UNPACK_PARSER',\n query,\n },\n },\n });\n } else {\n hints.push({\n type: 'ADD_JSON_PARSER',\n label: 'Selected log stream selector has JSON formatted logs.',\n fix: {\n title: 'add json parser',\n label: 'Consider using JSON parser.',\n action: {\n type: 'ADD_JSON_PARSER',\n query,\n },\n },\n });\n }\n }\n\n if (hasLogfmt) {\n hints.push({\n type: 'ADD_LOGFMT_PARSER',\n label: 'Selected log stream selector has logfmt formatted logs.',\n fix: {\n title: 'add logfmt parser',\n label: 'Consider using logfmt parser to turn key-value pairs in your log lines to labels.',\n action: {\n type: 'ADD_LOGFMT_PARSER',\n query,\n },\n },\n });\n }\n }\n\n if (queryWithParser) {\n // To keep this simple, we consider pipeline error filtering hint only is query has up to 1 parser\n if (parserCount === 1) {\n const hasPipelineErrorFiltering = isQueryPipelineErrorFiltering(query);\n const hasError = extractHasErrorLabelFromDataFrame(series[0]);\n if (hasError && !hasPipelineErrorFiltering) {\n hints.push({\n type: 'ADD_NO_PIPELINE_ERROR',\n label: 'Some logs in your selected log streams have parsing error.',\n fix: {\n title: 'remove pipeline errors',\n label: 'Consider filtering out logs with parsing errors.',\n action: {\n type: 'ADD_NO_PIPELINE_ERROR',\n query,\n },\n },\n });\n }\n }\n\n const hasLabelFilter = isQueryWithLabelFilter(query);\n\n if (!hasLabelFilter) {\n hints.push({\n type: 'ADD_LABEL_FILTER',\n label: 'Consider filtering logs by their label and value.',\n fix: {\n title: 'add label filter',\n label: '',\n action: {\n type: 'ADD_LABEL_FILTER',\n query,\n },\n },\n });\n }\n }\n\n const queryWithLabelFormat = isQueryWithLabelFormat(query);\n if (!queryWithLabelFormat) {\n const hasLevel = dataFrameHasLevelLabel(series[0]);\n const levelLikeLabel = extractLevelLikeLabelFromDataFrame(series[0]);\n\n // Add hint only if we don't have \"level\" label and have level-like label\n if (!hasLevel && levelLikeLabel) {\n hints.push({\n type: 'ADD_LEVEL_LABEL_FORMAT',\n label: `Some logs in your selected log stream have \"${levelLikeLabel}\" label.`,\n fix: {\n title: 'add label level format',\n label: `If ${levelLikeLabel} label has level values, consider using label_format to rename it to 
\"level\". Level label can be then visualized in log volumes.`,\n action: {\n type: 'ADD_LEVEL_LABEL_FORMAT',\n query,\n options: {\n renameTo: 'level',\n originalLabel: levelLikeLabel,\n },\n },\n },\n });\n }\n }\n\n const hasLineFilter = isQueryWithLineFilter(query);\n\n if (!hasLineFilter) {\n hints.push({\n type: 'ADD_LINE_FILTER',\n label: 'Consider filtering logs for specific string.',\n fix: {\n title: 'add line filter',\n label: '',\n action: {\n type: 'ADD_LINE_FILTER',\n query,\n },\n },\n });\n }\n\n return hints;\n}\n","// every timestamp in this file is a number which contains an unix-timestamp-in-millisecond format,\n// like returned by `new Date().getTime()`. this is needed because the \"math\"\n// has to be done on integer numbers.\n\n// the way loki handles logs-range-queries is that if you specify start & end,\n// one of those will be included, but the other will not. this allows us to\n// make it easy to split ranges.\n// for example, if the time-range is 100<>150,\n// we can split it into:\n// - 100<>120\n// - 120<>140\n// - 140<>150\n// and no log-line will be skipped or duplicated\n// (NOTE: we do these calculations in milliseconds. at the end, Loki receives\n// nanoseconds, but it will be OK, because it's simply a matter to adding `000000`,\n// to the end, so if we do it right in milliseconds, it should be OK in\n// nanoseconds too\n\nexport function splitTimeRange(\n startTime: number,\n endTime: number,\n idealRangeDuration: number\n): Array<[number, number]> {\n if (endTime - startTime <= idealRangeDuration) {\n return [[startTime, endTime]];\n }\n\n const result: Array<[number, number]> = [];\n\n // we walk backward, because we need want the potentially smaller \"last\" chunk\n // to be at the oldest timestamp.\n for (let chunkEndTime = endTime; chunkEndTime > startTime; chunkEndTime -= idealRangeDuration) {\n // when we get close to the start of the time range, we need to be sure not\n // to cross over the startTime\n const chunkStartTime = Math.max(chunkEndTime - idealRangeDuration, startTime);\n result.push([chunkStartTime, chunkEndTime]);\n }\n\n // because we walked backwards, we need to reverse the array\n result.reverse();\n\n return result;\n}\n","// every timestamp in this file is a number which contains an unix-timestamp-in-millisecond format,\n// like returned by `new Date().getTime()`. this is needed because the \"math\"\n// has to be done on integer numbers.\n\n// we are trying to be compatible with\n// https://github.com/grafana/loki/blob/089ec1b05f5ec15a8851d0e8230153e0eeb4dcec/pkg/querier/queryrange/split_by_interval.go#L327-L336\n\nexport function splitTimeRange(\n startTime: number,\n endTime: number,\n step: number,\n idealRangeDuration: number\n): Array<[number, number]> {\n if (idealRangeDuration < step) {\n // we cannot create chunks smaller than `step`\n return [[startTime, endTime]];\n }\n\n // we make the duration a multiple of `step`, lowering it if necessary\n const alignedDuration = Math.trunc(idealRangeDuration / step) * step;\n\n const alignedStartTime = startTime - (startTime % step);\n\n const result: Array<[number, number]> = [];\n\n // in a previous version we started iterating from the end, to the start.\n // However this is not easily possible as end timestamps are always inclusive\n // for Loki. So a `2022-02-08T00:00:00Z` end time with a 1day step would mean\n // to include the 08.02.2022, which we don't want. 
So we have to start from\n // the start, always ending at the last step before the actual end, or the total end.\n for (let chunkStartTime = alignedStartTime; chunkStartTime < endTime; chunkStartTime += alignedDuration) {\n const chunkEndTime = Math.min(chunkStartTime + alignedDuration - step, endTime);\n result.push([chunkStartTime, chunkEndTime]);\n }\n\n return result;\n}\n","import { groupBy, partition } from 'lodash';\nimport { Observable, Subscriber, Subscription, tap } from 'rxjs';\nimport { v4 as uuidv4 } from 'uuid';\n\nimport {\n arrayToDataFrame,\n DataQueryRequest,\n DataQueryResponse,\n DataTopic,\n dateTime,\n durationToMilliseconds,\n parseDuration,\n rangeUtil,\n TimeRange,\n LoadingState,\n} from '@grafana/data';\nimport { combineResponses } from '@grafana/o11y-ds-frontend';\n\nimport { LokiDatasource } from './datasource';\nimport { splitTimeRange as splitLogsTimeRange } from './logsTimeSplitting';\nimport { splitTimeRange as splitMetricTimeRange } from './metricTimeSplitting';\nimport { isLogsQuery, isQueryWithRangeVariable } from './queryUtils';\nimport { trackGroupedQueries } from './tracking';\nimport { LokiGroupedRequest, LokiQuery, LokiQueryType } from './types';\n\nexport function partitionTimeRange(\n isLogsQuery: boolean,\n originalTimeRange: TimeRange,\n stepMs: number,\n duration: number\n): TimeRange[] {\n const start = originalTimeRange.from.toDate().getTime();\n const end = originalTimeRange.to.toDate().getTime();\n\n const ranges = isLogsQuery\n ? splitLogsTimeRange(start, end, duration)\n : splitMetricTimeRange(start, end, stepMs, duration);\n\n return ranges.map(([start, end]) => {\n const from = dateTime(start);\n const to = dateTime(end);\n return {\n from,\n to,\n raw: { from, to },\n };\n });\n}\n\n/**\n * Based in the state of the current response, if any, adjust target parameters such as `maxLines`.\n * For `maxLines`, we will update it as `maxLines - current amount of lines`.\n * At the end, we will filter the targets that don't need to be executed in the next request batch,\n * becasue, for example, the `maxLines` have been reached.\n */\nfunction adjustTargetsFromResponseState(targets: LokiQuery[], response: DataQueryResponse | null): LokiQuery[] {\n if (!response) {\n return targets;\n }\n\n return targets\n .map((target) => {\n if (!target.maxLines || !isLogsQuery(target.expr)) {\n return target;\n }\n const targetFrame = response.data.find((frame) => frame.refId === target.refId);\n if (!targetFrame) {\n return target;\n }\n const updatedMaxLines = target.maxLines - targetFrame.length;\n return {\n ...target,\n maxLines: updatedMaxLines < 0 ? 0 : updatedMaxLines,\n };\n })\n .filter((target) => target.maxLines === undefined || target.maxLines > 0);\n}\nexport function runSplitGroupedQueries(datasource: LokiDatasource, requests: LokiGroupedRequest[]) {\n const responseKey = requests.length ? 
requests[0].request.queryGroupId : uuidv4();\n let mergedResponse: DataQueryResponse = { data: [], state: LoadingState.Streaming, key: responseKey };\n const totalRequests = Math.max(...requests.map(({ partition }) => partition.length));\n const longestPartition = requests.filter(({ partition }) => partition.length === totalRequests)[0].partition;\n\n let shouldStop = false;\n let subquerySubsciption: Subscription | null = null;\n const runNextRequest = (subscriber: Subscriber, requestN: number, requestGroup: number) => {\n if (shouldStop) {\n subscriber.complete();\n return;\n }\n\n const done = () => {\n mergedResponse.state = LoadingState.Done;\n subscriber.next(mergedResponse);\n subscriber.complete();\n };\n\n const nextRequest = () => {\n const { nextRequestN, nextRequestGroup } = getNextRequestPointers(requests, requestGroup, requestN);\n if (nextRequestN > 0 && nextRequestGroup >= 0) {\n runNextRequest(subscriber, nextRequestN, nextRequestGroup);\n return;\n }\n done();\n };\n\n const group = requests[requestGroup];\n const range = group.partition[requestN - 1];\n const targets = adjustTargetsFromResponseState(group.request.targets, mergedResponse);\n\n if (!targets.length) {\n nextRequest();\n return;\n }\n\n const subRequest = { ...requests[requestGroup].request, range, targets };\n // request may not have a request id\n if (group.request.requestId) {\n subRequest.requestId = `${group.request.requestId}_${requestN}`;\n }\n\n subquerySubsciption = datasource.runQuery(subRequest).subscribe({\n next: (partialResponse) => {\n mergedResponse = combineResponses(mergedResponse, partialResponse);\n mergedResponse = updateLoadingFrame(mergedResponse, subRequest, longestPartition, requestN);\n if ((mergedResponse.errors ?? []).length > 0 || mergedResponse.error != null) {\n shouldStop = true;\n }\n },\n complete: () => {\n subscriber.next(mergedResponse);\n nextRequest();\n },\n error: (error) => {\n subscriber.error(error);\n },\n });\n };\n\n const response = new Observable((subscriber) => {\n runNextRequest(subscriber, totalRequests, 0);\n return () => {\n shouldStop = true;\n if (subquerySubsciption != null) {\n subquerySubsciption.unsubscribe();\n }\n };\n });\n\n return response;\n}\n\nexport const LOADING_FRAME_NAME = 'loki-splitting-progress';\n\nfunction updateLoadingFrame(\n response: DataQueryResponse,\n request: DataQueryRequest,\n partition: TimeRange[],\n requestN: number\n): DataQueryResponse {\n if (isLogsQuery(request.targets[0].expr)) {\n return response;\n }\n response.data = response.data.filter((frame) => frame.name !== LOADING_FRAME_NAME);\n\n if (requestN <= 1) {\n return response;\n }\n\n const loadingFrame = arrayToDataFrame([\n {\n time: partition[0].from.valueOf(),\n timeEnd: partition[requestN - 2].to.valueOf(),\n isRegion: true,\n color: 'rgba(120, 120, 120, 0.1)',\n },\n ]);\n loadingFrame.name = LOADING_FRAME_NAME;\n loadingFrame.meta = {\n dataTopic: DataTopic.Annotations,\n };\n\n response.data.push(loadingFrame);\n\n return response;\n}\n\nfunction getNextRequestPointers(requests: LokiGroupedRequest[], requestGroup: number, requestN: number) {\n // There's a pending request from the next group:\n for (let i = requestGroup + 1; i < requests.length; i++) {\n const group = requests[i];\n if (group.partition[requestN - 1]) {\n return {\n nextRequestGroup: i,\n nextRequestN: requestN,\n };\n }\n }\n return {\n // Find the first group where `[requestN - 1]` is defined\n nextRequestGroup: requests.findIndex((group) => group?.partition[requestN - 1] !== 
undefined),\n nextRequestN: requestN - 1,\n };\n}\n\nfunction querySupportsSplitting(query: LokiQuery) {\n return (\n query.queryType !== LokiQueryType.Instant &&\n // Queries with $__range variable should not be split because then the interpolated $__range variable is incorrect\n // because it is interpolated on the backend with the split timeRange\n !isQueryWithRangeVariable(query.expr)\n );\n}\n\nexport function runSplitQuery(datasource: LokiDatasource, request: DataQueryRequest) {\n const queries = request.targets.filter((query) => !query.hide).filter((query) => query.expr);\n const [nonSplittingQueries, normalQueries] = partition(queries, (query) => !querySupportsSplitting(query));\n const [logQueries, metricQueries] = partition(normalQueries, (query) => isLogsQuery(query.expr));\n\n request.queryGroupId = uuidv4();\n const oneDayMs = 24 * 60 * 60 * 1000;\n const rangePartitionedLogQueries = groupBy(logQueries, (query) =>\n query.splitDuration ? durationToMilliseconds(parseDuration(query.splitDuration)) : oneDayMs\n );\n const rangePartitionedMetricQueries = groupBy(metricQueries, (query) =>\n query.splitDuration ? durationToMilliseconds(parseDuration(query.splitDuration)) : oneDayMs\n );\n\n const requests: LokiGroupedRequest[] = [];\n for (const [chunkRangeMs, queries] of Object.entries(rangePartitionedLogQueries)) {\n const resolutionPartition = groupBy(queries, (query) => query.resolution || 1);\n for (const resolution in resolutionPartition) {\n requests.push({\n request: { ...request, targets: resolutionPartition[resolution] },\n partition: partitionTimeRange(true, request.range, request.intervalMs, Number(chunkRangeMs)),\n });\n }\n }\n\n for (const [chunkRangeMs, queries] of Object.entries(rangePartitionedMetricQueries)) {\n const stepMsPartition = groupBy(queries, (query) =>\n calculateStep(request.intervalMs, request.range, query.resolution || 1, query.step)\n );\n\n for (const stepMs in stepMsPartition) {\n const targets = stepMsPartition[stepMs].map((q) => {\n const { maxLines, ...query } = q;\n return query;\n });\n requests.push({\n request: { ...request, targets },\n partition: partitionTimeRange(false, request.range, Number(stepMs), Number(chunkRangeMs)),\n });\n }\n }\n\n if (nonSplittingQueries.length) {\n requests.push({\n request: { ...request, targets: nonSplittingQueries },\n partition: [request.range],\n });\n }\n\n const startTime = new Date();\n return runSplitGroupedQueries(datasource, requests).pipe(\n tap((response) => {\n if (response.state === LoadingState.Done) {\n trackGroupedQueries(response, requests, request, startTime, {\n predefinedOperations: datasource.predefinedOperations,\n });\n }\n })\n );\n}\n\n// Replicate from backend for split queries for now, until we can move query splitting to the backend\n// https://github.com/grafana/grafana/blob/main/pkg/tsdb/loki/step.go#L23\nfunction calculateStep(intervalMs: number, range: TimeRange, resolution: number, step: string | undefined) {\n // If we can parse step,the we use it\n // Otherwise we will calculate step based on interval\n const interval_regex = /(-?\\d+(?:\\.\\d+)?)(ms|[Mwdhmsy])/;\n if (step?.match(interval_regex)) {\n return rangeUtil.intervalToMs(step) * resolution;\n }\n\n const newStep = intervalMs * resolution;\n const safeStep = Math.round((range.to.valueOf() - range.from.valueOf()) / 11000);\n return Math.max(newStep, safeStep);\n}\n","import { map, Observable, defer, mergeMap } from 'rxjs';\n\nimport {\n DataFrameJSON,\n DataQueryRequest,\n DataQueryResponse,\n LiveChannelScope,\n 
LoadingState,\n StreamingDataFrame,\n} from '@grafana/data';\nimport { getGrafanaLiveSrv, config } from '@grafana/runtime';\n\nimport { LokiDatasource } from './datasource';\nimport { LokiQuery } from './types';\n\n/**\n * Calculate a unique key for the query. The key is used to pick a channel and should\n * be unique for each distinct query execution plan. This key is not secure and is only picked to avoid\n * possible collisions\n */\nexport async function getLiveStreamKey(query: LokiQuery): Promise {\n const str = JSON.stringify({ expr: query.expr });\n\n const msgUint8 = new TextEncoder().encode(str); // encode as (utf-8) Uint8Array\n const hashBuffer = await crypto.subtle.digest('SHA-1', msgUint8); // hash the message\n const hashArray = Array.from(new Uint8Array(hashBuffer.slice(0, 8))); // first 8 bytes\n return hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');\n}\n\n// This will get both v1 and v2 result formats\nexport function doLokiChannelStream(\n query: LokiQuery,\n ds: LokiDatasource,\n options: DataQueryRequest\n): Observable {\n // maximum time to keep values\n const range = options.range;\n const maxDelta = range.to.valueOf() - range.from.valueOf() + 1000;\n let maxLength = options.maxDataPoints ?? 1000;\n if (maxLength > 100) {\n // for small buffers, keep them small\n maxLength *= 2;\n }\n\n let frame: StreamingDataFrame | undefined = undefined;\n const updateFrame = (msg: any) => {\n if (msg?.message) {\n const p: DataFrameJSON = msg.message;\n if (!frame) {\n frame = StreamingDataFrame.fromDataFrameJSON(p, {\n maxLength,\n maxDelta,\n displayNameFormat: query.legendFormat,\n });\n } else {\n frame.push(p);\n }\n }\n return frame;\n };\n\n return defer(() => getLiveStreamKey(query)).pipe(\n mergeMap((key) => {\n return getGrafanaLiveSrv()\n .getStream({\n scope: LiveChannelScope.DataSource,\n namespace: ds.uid,\n path: `tail/${key}`,\n data: {\n ...query,\n timeRange: {\n from: range.from.valueOf().toString(),\n to: range.to.valueOf().toString(),\n },\n },\n })\n .pipe(\n map((evt) => {\n const frame = updateFrame(evt);\n return {\n data: frame ? [frame] : [],\n state: LoadingState.Streaming,\n };\n })\n );\n })\n );\n}\n\nexport const convertToWebSocketUrl = (url: string) => {\n const protocol = window.location.protocol === 'https:' ? 
'wss://' : 'ws://';\n let backend = `${protocol}${window.location.host}${config.appSubUrl}`;\n if (backend.endsWith('/')) {\n backend = backend.slice(0, -1);\n }\n return `${backend}${url}`;\n};\n","import { cloneDeep, map as lodashMap } from 'lodash';\nimport { lastValueFrom, merge, Observable, of, throwError } from 'rxjs';\nimport { catchError, map, switchMap, tap } from 'rxjs/operators';\n\nimport {\n AbstractQuery,\n AnnotationEvent,\n AnnotationQueryRequest,\n CoreApp,\n DataFrame,\n DataFrameView,\n DataQueryRequest,\n DataQueryResponse,\n DataSourceInstanceSettings,\n DataSourceWithLogsContextSupport,\n DataSourceWithSupplementaryQueriesSupport,\n SupplementaryQueryType,\n DataSourceWithQueryExportSupport,\n DataSourceWithQueryImportSupport,\n Labels,\n LoadingState,\n LogRowModel,\n QueryFixAction,\n QueryHint,\n rangeUtil,\n ScopedVars,\n SupplementaryQueryOptions,\n TimeRange,\n LogRowContextOptions,\n DataSourceWithToggleableQueryFiltersSupport,\n ToggleFilterAction,\n QueryFilterOptions,\n renderLegendFormat,\n LegacyMetricFindQueryOptions,\n AdHocVariableFilter,\n urlUtil,\n MetricFindValue,\n DataSourceGetTagValuesOptions,\n DataSourceGetTagKeysOptions,\n DataSourceWithQueryModificationSupport,\n} from '@grafana/data';\nimport { Duration } from '@grafana/lezer-logql';\nimport { BackendSrvRequest, config, DataSourceWithBackend, getTemplateSrv, TemplateSrv } from '@grafana/runtime';\nimport { DataQuery } from '@grafana/schema';\n\nimport LanguageProvider from './LanguageProvider';\nimport { LiveStreams, LokiLiveTarget } from './LiveStreams';\nimport { LogContextProvider } from './LogContextProvider';\nimport { LokiVariableSupport } from './LokiVariableSupport';\nimport { transformBackendResult } from './backendResultTransformer';\nimport { LokiAnnotationsQueryEditor } from './components/AnnotationsQueryEditor';\nimport { placeHolderScopedVars } from './components/monaco-query-field/monaco-completion-provider/validation';\nimport { escapeLabelValueInSelector, isRegexSelector, getLabelTypeFromFrame } from './languageUtils';\nimport { labelNamesRegex, labelValuesRegex } from './migrations/variableQueryMigrations';\nimport {\n addLabelFormatToQuery,\n addLabelToQuery,\n addNoPipelineErrorToQuery,\n addParserToQuery,\n removeCommentsFromQuery,\n addFilterAsLabelFilter,\n getParserPositions,\n toLabelFilter,\n addLineFilter,\n findLastPosition,\n getLabelFilterPositions,\n queryHasFilter,\n removeLabelFromQuery,\n} from './modifyQuery';\nimport { getQueryHints } from './queryHints';\nimport { runSplitQuery } from './querySplitting';\nimport {\n getLogQueryFromMetricsQuery,\n getLokiQueryFromDataQuery,\n getNodesFromQuery,\n getNormalizedLokiQuery,\n getStreamSelectorsFromQuery,\n isLogsQuery,\n isQueryWithError,\n requestSupportsSplitting,\n} from './queryUtils';\nimport { replaceVariables, returnVariables } from './querybuilder/parsingUtils';\nimport { convertToWebSocketUrl, doLokiChannelStream } from './streaming';\nimport { trackQuery } from './tracking';\nimport {\n LokiOptions,\n LokiQuery,\n LokiQueryType,\n LokiVariableQuery,\n LokiVariableQueryType,\n QueryStats,\n SupportingQueryType,\n} from './types';\n\nexport type RangeQueryOptions = DataQueryRequest | AnnotationQueryRequest;\nexport const DEFAULT_MAX_LINES = 1000;\nexport const DEFAULT_MAX_LINES_SAMPLE = 10;\nexport const LOKI_ENDPOINT = '/loki/api/v1';\nexport const REF_ID_DATA_SAMPLES = 'loki-data-samples';\nexport const REF_ID_STARTER_ANNOTATION = 'annotation-';\nexport const REF_ID_STARTER_LOG_ROW_CONTEXT = 
'log-row-context-query-';\nexport const REF_ID_STARTER_LOG_VOLUME = 'log-volume-';\nexport const REF_ID_STARTER_LOG_SAMPLE = 'log-sample-';\nexport const REF_ID_STARTER_STATS = 'log-stats-';\n\nconst NS_IN_MS = 1000000;\n\nexport function makeRequest(\n query: LokiQuery,\n range: TimeRange,\n app: CoreApp,\n requestId: string,\n hideFromInspector?: boolean\n): DataQueryRequest {\n const intervalInfo = rangeUtil.calculateInterval(range, 1);\n return {\n targets: [query],\n requestId,\n interval: intervalInfo.interval,\n intervalMs: intervalInfo.intervalMs,\n range: range,\n scopedVars: {},\n timezone: 'UTC',\n app,\n startTime: Date.now(),\n hideFromInspector,\n };\n}\n\nexport class LokiDatasource\n extends DataSourceWithBackend\n implements\n DataSourceWithLogsContextSupport,\n DataSourceWithSupplementaryQueriesSupport,\n DataSourceWithQueryImportSupport,\n DataSourceWithQueryExportSupport,\n DataSourceWithToggleableQueryFiltersSupport,\n DataSourceWithQueryModificationSupport\n{\n private streams = new LiveStreams();\n private logContextProvider: LogContextProvider;\n languageProvider: LanguageProvider;\n maxLines: number;\n predefinedOperations: string;\n\n constructor(\n private instanceSettings: DataSourceInstanceSettings,\n private readonly templateSrv: TemplateSrv = getTemplateSrv()\n ) {\n super(instanceSettings);\n\n this.languageProvider = new LanguageProvider(this);\n const settingsData = instanceSettings.jsonData || {};\n this.maxLines = parseInt(settingsData.maxLines ?? '0', 10) || DEFAULT_MAX_LINES;\n this.predefinedOperations = settingsData.predefinedOperations ?? '';\n this.annotations = {\n QueryEditor: LokiAnnotationsQueryEditor,\n };\n this.variables = new LokiVariableSupport(this);\n this.logContextProvider = new LogContextProvider(this);\n }\n\n /**\n * Implemented for DataSourceWithSupplementaryQueriesSupport.\n * It generates a DataQueryRequest for a specific supplementary query type.\n * @returns A DataQueryRequest for the supplementary queries or undefined if not supported.\n */\n getSupplementaryRequest(\n type: SupplementaryQueryType,\n request: DataQueryRequest\n ): DataQueryRequest | undefined {\n switch (type) {\n case SupplementaryQueryType.LogsVolume:\n return this.getLogsVolumeDataProvider(request);\n case SupplementaryQueryType.LogsSample:\n return this.getLogsSampleDataProvider(request);\n default:\n return undefined;\n }\n }\n\n /**\n * Implemented for DataSourceWithSupplementaryQueriesSupport.\n * It returns the supplementary types that the data source supports.\n * @returns An array of supported supplementary query types.\n */\n getSupportedSupplementaryQueryTypes(): SupplementaryQueryType[] {\n return [SupplementaryQueryType.LogsVolume, SupplementaryQueryType.LogsSample];\n }\n\n /**\n * Implemented for DataSourceWithSupplementaryQueriesSupport.\n * It retrieves supplementary queries based on the provided options and Loki query.\n * @returns A supplemented Loki query or undefined if unsupported.\n */\n getSupplementaryQuery(options: SupplementaryQueryOptions, query: LokiQuery): LokiQuery | undefined {\n const normalizedQuery = getNormalizedLokiQuery(query);\n let expr = removeCommentsFromQuery(normalizedQuery.expr);\n let isQuerySuitable = false;\n\n switch (options.type) {\n case SupplementaryQueryType.LogsVolume:\n // it has to be a logs-producing range-query\n isQuerySuitable = !!(expr && isLogsQuery(expr) && normalizedQuery.queryType === LokiQueryType.Range);\n if (!isQuerySuitable) {\n return undefined;\n }\n\n const dropErrorExpression = 
`${expr} | drop __error__`;\n if (isQueryWithError(this.interpolateString(dropErrorExpression, placeHolderScopedVars)) === false) {\n expr = dropErrorExpression;\n }\n\n return {\n ...normalizedQuery,\n refId: `${REF_ID_STARTER_LOG_VOLUME}${normalizedQuery.refId}`,\n queryType: LokiQueryType.Range,\n supportingQueryType: SupportingQueryType.LogsVolume,\n expr: `sum by (level) (count_over_time(${expr}[$__auto]))`,\n };\n\n case SupplementaryQueryType.LogsSample:\n // it has to be a metric query\n isQuerySuitable = !!(expr && !isLogsQuery(expr));\n if (!isQuerySuitable) {\n return undefined;\n }\n return {\n ...normalizedQuery,\n queryType: LokiQueryType.Range,\n refId: `${REF_ID_STARTER_LOG_SAMPLE}${normalizedQuery.refId}`,\n expr: getLogQueryFromMetricsQuery(expr),\n maxLines: Number.isNaN(Number(options.limit)) ? this.maxLines : Number(options.limit),\n };\n\n default:\n return undefined;\n }\n }\n\n /**\n * Private method used in the `getDataProvider` for DataSourceWithSupplementaryQueriesSupport, specifically for Logs volume queries.\n * @returns An Observable of DataQueryResponse or undefined if no suitable queries are found.\n */\n private getLogsVolumeDataProvider(request: DataQueryRequest): DataQueryRequest | undefined {\n const logsVolumeRequest = cloneDeep(request);\n const targets = logsVolumeRequest.targets\n .map((query) => this.getSupplementaryQuery({ type: SupplementaryQueryType.LogsVolume }, query))\n .filter((query): query is LokiQuery => !!query);\n\n if (!targets.length) {\n return undefined;\n }\n\n return { ...logsVolumeRequest, targets };\n }\n\n /**\n * Private method used in the `getDataProvider` for DataSourceWithSupplementaryQueriesSupport, specifically for Logs sample queries.\n * @returns An Observable of DataQueryResponse or undefined if no suitable queries are found.\n */\n private getLogsSampleDataProvider(request: DataQueryRequest): DataQueryRequest | undefined {\n const logsSampleRequest = cloneDeep(request);\n const targets = logsSampleRequest.targets\n .map((query) => this.getSupplementaryQuery({ type: SupplementaryQueryType.LogsSample, limit: 100 }, query))\n .filter((query): query is LokiQuery => !!query);\n\n if (!targets.length) {\n return undefined;\n }\n return { ...logsSampleRequest, targets };\n }\n\n /**\n * Required by DataSourceApi. It executes queries based on the provided DataQueryRequest.\n * @returns An Observable of DataQueryResponse containing the query results.\n */\n query(request: DataQueryRequest): Observable {\n const queries = request.targets\n .map(getNormalizedLokiQuery) // used to \"fix\" the deprecated `.queryType` prop\n .map((q) => ({ ...q, maxLines: q.maxLines ?? 
this.maxLines }));\n\n const fixedRequest: DataQueryRequest = {\n ...request,\n targets: queries,\n };\n\n const streamQueries = fixedRequest.targets.filter((q) => q.queryType === LokiQueryType.Stream);\n if (\n config.featureToggles.lokiExperimentalStreaming &&\n streamQueries.length > 0 &&\n fixedRequest.rangeRaw?.to === 'now'\n ) {\n // this is still an in-development feature,\n // we do not support mixing stream-queries with normal-queries for now.\n const streamRequest = {\n ...fixedRequest,\n targets: streamQueries,\n };\n return merge(\n ...streamQueries.map((q) =>\n doLokiChannelStream(\n this.applyTemplateVariables(q, request.scopedVars),\n this, // the datasource\n streamRequest\n )\n )\n );\n }\n\n if (fixedRequest.liveStreaming) {\n return this.runLiveQueryThroughBackend(fixedRequest);\n }\n\n if (config.featureToggles.lokiQuerySplitting && requestSupportsSplitting(fixedRequest.targets)) {\n return runSplitQuery(this, fixedRequest);\n }\n\n const startTime = new Date();\n return this.runQuery(fixedRequest).pipe(\n tap((response) =>\n trackQuery(response, fixedRequest, startTime, { predefinedOperations: this.predefinedOperations })\n )\n );\n }\n\n /**\n * Executes requests through the backend using the `super.query()`, as part of the `query` method in DataSourceWithBackend.\n * @returns An Observable of transformed DataQueryResponse results from the backend.\n */\n runQuery(fixedRequest: DataQueryRequest) {\n return super\n .query(fixedRequest)\n .pipe(\n map((response) =>\n transformBackendResult(response, fixedRequest.targets, this.instanceSettings.jsonData.derivedFields ?? [])\n )\n );\n }\n\n /**\n * Used within the `query` to execute live queries.\n * It is intended for explore-mode and logs-queries, not metric queries.\n * @returns An Observable of DataQueryResponse with live query results or an empty response if no suitable queries are found.\n * @todo: The name says \"backend\" but it's actually running the query through the frontend. We should fix this.\n */\n private runLiveQueryThroughBackend(request: DataQueryRequest): Observable {\n // this only works in explore-mode so variables don't need to be handled,\n // and only for logs-queries, not metric queries\n const logsQueries = request.targets.filter((query) => query.expr !== '' && isLogsQuery(query.expr));\n\n if (logsQueries.length === 0) {\n return of({\n data: [],\n state: LoadingState.Done,\n });\n }\n\n const subQueries = logsQueries.map((query) => {\n const maxDataPoints = query.maxLines || this.maxLines;\n // FIXME: currently we are running it through the frontend still.\n return this.runLiveQuery(query, maxDataPoints);\n });\n\n return merge(...subQueries);\n }\n\n /**\n * Used within the `runLiveQuery` to create a live target for a Loki query.\n * @returns A LokiLiveTarget object containing the necessary information for a live query.\n */\n private createLiveTarget(target: LokiQuery, maxDataPoints: number): LokiLiveTarget {\n const query = target.expr;\n const baseUrl = this.instanceSettings.url;\n const params = urlUtil.serializeParams({ query });\n\n return {\n query,\n url: convertToWebSocketUrl(`${baseUrl}/loki/api/v1/tail?${params}`),\n refId: target.refId,\n size: maxDataPoints,\n };\n }\n\n /**\n * Runs live queries, which involves creating a WebSocket connection to listen for new logs.\n * It returns a slightly different DataQueryResponse compared to runQueries. It provides a single DataFrame\n * even if there are multiple Loki streams. 
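// --- A minimal sketch (hypothetical proxy path) of the WebSocket URL
// rewriting used by createLiveTarget above; only the rewriting behaviour
// comes from convertToWebSocketUrl in './streaming'.
import { convertToWebSocketUrl } from './streaming';

// On an https page this prepends 'wss://' + window.location.host +
// config.appSubUrl (with a trailing '/' trimmed) to the given path.
const wsUrl = convertToWebSocketUrl(
  '/api/datasources/proxy/uid/abc123/loki/api/v1/tail?query=%7Bjob%3D%22nginx%22%7D'
);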
Common labels are set on dataFrame.labels, and unique labels per row are\n * available in dataFrame.fields.labels.\n * @returns An Observable of DataQueryResponse with streaming data or an error message if live tailing encounters an issue.\n */\n private runLiveQuery = (target: LokiQuery, maxDataPoints: number): Observable => {\n const liveTarget = this.createLiveTarget(target, maxDataPoints);\n\n return this.streams.getStream(liveTarget).pipe(\n map((data) => ({\n data: data || [],\n key: `loki-${liveTarget.refId}`,\n state: LoadingState.Streaming,\n })),\n catchError((err: any) => {\n return throwError(() => `Live tailing was stopped due to following error: ${err.reason}`);\n })\n );\n };\n\n /**\n * Implemented as a part of DataSourceApi. Interpolates variables and adds ad hoc filters to a list of Loki queries.\n * @returns An array of expanded Loki queries with interpolated variables and ad hoc filters.\n */\n interpolateVariablesInQueries(\n queries: LokiQuery[],\n scopedVars: ScopedVars,\n adhocFilters?: AdHocVariableFilter[]\n ): LokiQuery[] {\n let expandedQueries = queries;\n if (queries && queries.length) {\n expandedQueries = queries.map((query) => ({\n ...query,\n datasource: this.getRef(),\n expr: this.addAdHocFilters(\n this.templateSrv.replace(query.expr, scopedVars, this.interpolateQueryExpr),\n adhocFilters\n ),\n }));\n }\n\n return expandedQueries;\n }\n\n /**\n * Implemented as part of DataSourceApi. Converts a Loki query to a simple text string.\n * Used, for example, in Query history.\n * @returns A text representation of the query.\n */\n getQueryDisplayText(query: LokiQuery) {\n return query.expr;\n }\n\n /**\n * Given a time range, returns it as Loki parameters.\n * @returns An object containing the start and end times in nanoseconds since the Unix epoch.\n */\n getTimeRangeParams(timeRange: TimeRange) {\n return { start: timeRange.from.valueOf() * NS_IN_MS, end: timeRange.to.valueOf() * NS_IN_MS };\n }\n\n /**\n * Implemented as part of DataSourceWithQueryImportSupport.\n * Imports queries from AbstractQuery objects when switching between different data source types.\n * @returns A Promise that resolves to an array of Loki queries.\n */\n async importFromAbstractQueries(abstractQueries: AbstractQuery[]): Promise {\n await this.languageProvider.start();\n const existingKeys = this.languageProvider.labelKeys;\n\n if (existingKeys && existingKeys.length) {\n abstractQueries = abstractQueries.map((abstractQuery) => {\n abstractQuery.labelMatchers = abstractQuery.labelMatchers.filter((labelMatcher) => {\n return existingKeys.includes(labelMatcher.name);\n });\n return abstractQuery;\n });\n }\n\n return abstractQueries.map((abstractQuery) => this.languageProvider.importFromAbstractQuery(abstractQuery));\n }\n\n /**\n * Implemented as part of DataSourceWithQueryImportSupport.\n * Exports Loki queries to AbstractQuery objects when switching between different data source types.\n * @returns A Promise that resolves to an array of AbstractQuery objects.\n */\n async exportToAbstractQueries(queries: LokiQuery[]): Promise {\n return queries.map((query) => this.languageProvider.exportToAbstractQuery(query));\n }\n\n /**\n * A method that wraps `getResource` from DataSourceWithBackend to perform metadata requests, with an additional check for valid URL values.\n * @returns A Promise that resolves to the data retrieved from the metadata request, or an empty array if no data is available.\n */\n async metadataRequest(url: string, params?: Record, options?: Partial) {\n // url 
must not start with a `/`, otherwise the AJAX-request\n // going from the browser will contain `//`, which can cause problems.\n if (url.startsWith('/')) {\n throw new Error(`invalid metadata request url: ${url}`);\n }\n\n const res = await this.getResource(url, params, options);\n return res.data || [];\n }\n\n /**\n * Used in `getQueryStats`. It wraps `getResource` from DataSourceWithBackend to perform a stats request\n * Specifically designed for the stats endpoint, which does not return data but includes stats directly in the response object.\n * @returns A Promise that resolves to a QueryStats object containing the statistics retrieved from the stats request.\n */\n async statsMetadataRequest(\n url: string,\n params?: Record,\n options?: Partial\n ): Promise {\n if (url.startsWith('/')) {\n throw new Error(`invalid metadata request url: ${url}`);\n }\n\n return await this.getResource(url, params, options);\n }\n\n /**\n * Used in `getStats`. Retrieves statistics for a Loki query and processes them into a QueryStats object.\n * @returns A Promise that resolves to a QueryStats object containing the query statistics or undefined if the query is invalid.\n */\n async getQueryStats(query: LokiQuery, timeRange: TimeRange): Promise {\n // if query is invalid, clear stats, and don't request\n if (isQueryWithError(this.interpolateString(query.expr, placeHolderScopedVars))) {\n return undefined;\n }\n\n const labelMatchers = getStreamSelectorsFromQuery(query.expr);\n let statsForAll: QueryStats = { streams: 0, chunks: 0, bytes: 0, entries: 0 };\n\n for (const idx in labelMatchers) {\n const { start, end } = this.getStatsTimeRange(query, Number(idx), timeRange);\n\n if (start === undefined || end === undefined) {\n return { streams: 0, chunks: 0, bytes: 0, entries: 0, message: 'Query size estimate not available.' };\n }\n\n try {\n const data = await this.statsMetadataRequest(\n 'index/stats',\n {\n query: labelMatchers[idx],\n start: start,\n end: end,\n },\n { showErrorAlert: false, requestId: `${REF_ID_STARTER_STATS}${query.refId}` }\n );\n\n statsForAll = {\n streams: statsForAll.streams + data.streams,\n chunks: statsForAll.chunks + data.chunks,\n bytes: statsForAll.bytes + data.bytes,\n entries: statsForAll.entries + data.entries,\n };\n } catch (e) {\n break;\n }\n }\n\n return statsForAll;\n }\n\n /**\n * Used within the `getQueryStats`. Retrieves the time range for a Loki stats query, adjusting it to cover the requested period.\n * In metric queries, this means extending it over the range interval.\n * @returns An object containing the start and end time in nanoseconds (NS_IN_MS) or undefined if the time range cannot be estimated.\n */\n\n getStatsTimeRange(\n query: LokiQuery,\n idx: number,\n timeRange: TimeRange\n ): { start: number | undefined; end: number | undefined } {\n let start: number, end: number;\n const NS_IN_MS = 1000000;\n const durationNodes = getNodesFromQuery(query.expr, [Duration]);\n const durations = durationNodes.map((d) => query.expr.substring(d.from, d.to));\n\n if (isLogsQuery(query.expr)) {\n // logs query with instant type can not be estimated\n if (query.queryType === LokiQueryType.Instant) {\n return { start: undefined, end: undefined };\n }\n // logs query with range type\n return this.getTimeRangeParams(timeRange);\n }\n\n if (query.queryType === LokiQueryType.Instant) {\n // metric query with instant type\n\n if (!!durations[idx]) {\n // if query has a duration e.g. 
[1m]\n end = this.getTimeRangeParams(timeRange).end;\n start = end - rangeUtil.intervalToMs(durations[idx]) * NS_IN_MS;\n return { start, end };\n } else {\n // if query has no duration e.g. [$__interval]\n\n if (/(\\$__auto|\\$__range)/.test(query.expr)) {\n // if $__auto or $__range is used, we can estimate the time range using the selected range\n return this.getTimeRangeParams(timeRange);\n }\n\n // otherwise we cant estimate the time range\n return { start: undefined, end: undefined };\n }\n }\n\n // metric query with range type\n return this.getTimeRangeParams(timeRange);\n }\n\n /**\n * Retrieves statistics for a Loki query and returns the QueryStats object.\n * @returns A Promise that resolves to a QueryStats object or null if the query is invalid or has no statistics.\n */\n async getStats(query: LokiQuery, timeRange: TimeRange): Promise {\n if (!query.expr) {\n return null;\n }\n\n const response = await this.getQueryStats(query, timeRange);\n\n if (!response) {\n return null;\n }\n\n return Object.values(response).every((v) => v === 0) ? null : response;\n }\n\n /**\n * Implemented as part of DataSourceAPI and used for template variable queries.\n * @returns A Promise that resolves to an array of results from the metric find query.\n */\n async metricFindQuery(\n query: LokiVariableQuery | string,\n options?: LegacyMetricFindQueryOptions\n ): Promise {\n if (!query) {\n return Promise.resolve([]);\n }\n\n let interpolatedVariableQuery: LokiVariableQuery | undefined;\n\n if (typeof query === 'string') {\n interpolatedVariableQuery = this.parseStringToVariableQuery(this.interpolateString(query, options?.scopedVars));\n } else {\n interpolatedVariableQuery = {\n ...query,\n label: this.interpolateString(query.label || '', options?.scopedVars),\n stream: this.interpolateString(query.stream || '', options?.scopedVars),\n };\n }\n\n if (interpolatedVariableQuery) {\n return await this.processMetricFindQuery(interpolatedVariableQuery, options?.range);\n }\n\n return Promise.resolve([]);\n }\n\n /**\n * Used within the `metricFindQuery`. 
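// --- A minimal usage sketch (hypothetical label and stream values) for the
// template-variable path above; legacy string queries are converted by
// parseStringToVariableQuery (below) into a structured LokiVariableQuery.
async function exampleVariableLookup(ds: LokiDatasource) {
  // roughly equivalent structured form:
  // { type: LokiVariableQueryType.LabelValues, stream: '{job="nginx"}', label: 'instance', refId: '...' }
  const values = await ds.metricFindQuery('label_values({job="nginx"}, instance)');
  return values; // array of { text: string }, one entry per label value
}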
Retrieves the correct variable results based on the provided LokiVariableQuery.\n * @returns A Promise that resolves to an array of variable results based on the query type and parameters.\n */\n\n private async processMetricFindQuery(query: LokiVariableQuery, timeRange?: TimeRange): Promise {\n if (query.type === LokiVariableQueryType.LabelNames) {\n const result = await this.languageProvider.fetchLabels({ timeRange });\n return result.map((value: string) => ({ text: value }));\n }\n\n if (!query.label) {\n return [];\n }\n\n // If we have stream selector, use /series endpoint\n if (query.stream) {\n const result = await this.languageProvider.fetchSeriesLabels(query.stream, { timeRange });\n if (!result[query.label]) {\n return [];\n }\n return result[query.label].map((value: string) => ({ text: value }));\n }\n\n const result = await this.languageProvider.fetchLabelValues(query.label, { timeRange });\n return result.map((value: string) => ({ text: value }));\n }\n\n /**\n * Used in `metricFindQuery` to process legacy query strings (label_name() and label_values()) to variable query objects.\n * @returns LokiVariableQuery object based on the provided query string, or undefined if string can't be parsed.\n */\n private parseStringToVariableQuery(query: string): LokiVariableQuery | undefined {\n const refId = 'LokiVariableQueryEditor-VariableQuery';\n const labelNames = query.match(labelNamesRegex);\n if (labelNames) {\n return {\n type: LokiVariableQueryType.LabelNames,\n refId,\n };\n }\n\n const labelValues = query.match(labelValuesRegex);\n if (labelValues) {\n return {\n type: LokiVariableQueryType.LabelValues,\n label: labelValues[2],\n stream: labelValues[1],\n refId,\n };\n }\n return undefined;\n }\n\n /**\n * Used to fetch data samples, typically for autocompletion and query building to recommend parsers, labels, and values based on sampled data.\n * Currently, it works for logs data only.\n * @returns A Promise that resolves to an array of DataFrames containing data samples.\n */\n async getDataSamples(query: LokiQuery, timeRange: TimeRange): Promise {\n // Currently works only for logs sample\n if (!isLogsQuery(query.expr) || isQueryWithError(this.interpolateString(query.expr, placeHolderScopedVars))) {\n return [];\n }\n\n const lokiLogsQuery: LokiQuery = {\n expr: query.expr,\n queryType: LokiQueryType.Range,\n refId: REF_ID_DATA_SAMPLES,\n maxLines: query.maxLines || DEFAULT_MAX_LINES_SAMPLE,\n supportingQueryType: SupportingQueryType.DataSample,\n };\n\n const request = makeRequest(lokiLogsQuery, timeRange, CoreApp.Unknown, REF_ID_DATA_SAMPLES, true);\n return await lastValueFrom(this.query(request).pipe(switchMap((res) => of(res.data))));\n }\n\n /**\n * Implemented as part of the DataSourceAPI. Retrieves tag keys that can be used for ad-hoc filtering.\n * @returns A Promise that resolves to an array of label names represented as MetricFindValue objects.\n */\n async getTagKeys(options?: DataSourceGetTagKeysOptions): Promise {\n const result = await this.languageProvider.fetchLabels({ timeRange: options?.timeRange });\n return result.map((value: string) => ({ text: value }));\n }\n\n /**\n * Implemented as part of the DataSourceAPI. 
Retrieves tag values that can be used for ad-hoc filtering.\n * @returns A Promise that resolves to an array of label values represented as MetricFindValue objects\n */\n async getTagValues(options: DataSourceGetTagValuesOptions): Promise {\n const result = await this.languageProvider.fetchLabelValues(options.key, { timeRange: options.timeRange });\n return result.map((value: string) => ({ text: value }));\n }\n\n /**\n * Used for interpolation logic in `interpolateVariablesInQueries` and `applyTemplateVariables`.\n * Handles escaping of special characters based on variable type and value.\n * @returns The interpolated value with appropriate character escaping.\n */\n interpolateQueryExpr(value: any, variable: any) {\n // if no multi or include all do not regexEscape\n if (!variable.multi && !variable.includeAll) {\n return lokiRegularEscape(value);\n }\n\n if (typeof value === 'string') {\n return lokiSpecialRegexEscape(value);\n }\n\n const escapedValues = lodashMap(value, lokiSpecialRegexEscape);\n return escapedValues.join('|');\n }\n\n /**\n * Implemented for `DataSourceWithToggleableQueryFiltersSupport`. Toggles a filter on or off based on the provided filter action.\n * It is used, for example, in Explore to toggle fields on and off through log details.\n * @returns A new LokiQuery with the filter toggled as specified.\n */\n toggleQueryFilter(query: LokiQuery, filter: ToggleFilterAction): LokiQuery {\n let expression = query.expr ?? '';\n const labelType = getLabelTypeFromFrame(filter.options.key, filter.frame, 0);\n switch (filter.type) {\n case 'FILTER_FOR': {\n if (filter.options?.key && filter.options?.value) {\n const value = escapeLabelValueInSelector(filter.options.value);\n\n // This gives the user the ability to toggle a filter on and off.\n expression = queryHasFilter(expression, filter.options.key, '=', value)\n ? removeLabelFromQuery(expression, filter.options.key, '=', value)\n : addLabelToQuery(expression, filter.options.key, '=', value, labelType);\n }\n break;\n }\n case 'FILTER_OUT': {\n if (filter.options?.key && filter.options?.value) {\n const value = escapeLabelValueInSelector(filter.options.value);\n\n /**\n * If there is a filter with the same key and value, remove it.\n * This prevents the user from seeing no changes in the query when they apply\n * this filter.\n */\n if (queryHasFilter(expression, filter.options.key, '=', value)) {\n expression = removeLabelFromQuery(expression, filter.options.key, '=', value);\n }\n\n expression = addLabelToQuery(expression, filter.options.key, '!=', value, labelType);\n }\n break;\n }\n default:\n break;\n }\n return { ...query, expr: expression };\n }\n\n /**\n * Implemented for `DataSourceWithToggleableQueryFiltersSupport`. Checks if a query expression contains a filter based on the provided filter options.\n * @returns A boolean value indicating whether the filter exists in the query expression.\n */\n queryHasFilter(query: LokiQuery, filter: QueryFilterOptions): boolean {\n let expression = query.expr ?? '';\n return queryHasFilter(expression, filter.key, '=', filter.value);\n }\n\n /**\n * Implemented as part of `DataSourceWithQueryModificationSupport`. Used to modify a query based on the provided action.\n * It is used, for example, in the Query Builder to apply hints such as parsers, operations, etc.\n * @returns A new LokiQuery with the specified modification applied.\n */\n modifyQuery(query: LokiQuery, action: QueryFixAction): LokiQuery {\n let expression = query.expr ?? 
'';\n // NB: Usually the labelKeys should be fetched and cached in the datasource,\n // but there might be some edge cases where this wouldn't be the case.\n // However, that change would make this method `async`.\n switch (action.type) {\n case 'ADD_FILTER': {\n if (action.options?.key && action.options?.value) {\n const labelType = getLabelTypeFromFrame(action.options.key, action.frame, 0);\n const value = escapeLabelValueInSelector(action.options.value);\n expression = addLabelToQuery(expression, action.options.key, '=', value, labelType);\n }\n break;\n }\n case 'ADD_FILTER_OUT': {\n if (action.options?.key && action.options?.value) {\n const labelType = getLabelTypeFromFrame(action.options.key, action.frame, 0);\n const value = escapeLabelValueInSelector(action.options.value);\n expression = addLabelToQuery(expression, action.options.key, '!=', value, labelType);\n }\n break;\n }\n case 'ADD_LOGFMT_PARSER': {\n expression = addParserToQuery(expression, 'logfmt');\n break;\n }\n case 'ADD_JSON_PARSER': {\n expression = addParserToQuery(expression, 'json');\n break;\n }\n case 'ADD_UNPACK_PARSER': {\n expression = addParserToQuery(expression, 'unpack');\n break;\n }\n case 'ADD_NO_PIPELINE_ERROR': {\n expression = addNoPipelineErrorToQuery(expression);\n break;\n }\n case 'ADD_LEVEL_LABEL_FORMAT': {\n if (action.options?.originalLabel && action.options?.renameTo) {\n expression = addLabelFormatToQuery(expression, {\n renameTo: action.options.renameTo,\n originalLabel: action.options.originalLabel,\n });\n }\n break;\n }\n case 'ADD_LABEL_FILTER': {\n const parserPositions = getParserPositions(query.expr);\n const labelFilterPositions = getLabelFilterPositions(query.expr);\n const lastPosition = findLastPosition([...parserPositions, ...labelFilterPositions]);\n const filter = toLabelFilter('', '', '=');\n expression = addFilterAsLabelFilter(expression, [lastPosition], filter);\n break;\n }\n case 'ADD_STRING_FILTER':\n case 'ADD_LINE_FILTER': {\n expression = addLineFilter(expression, action.options?.value);\n break;\n }\n case 'ADD_STRING_FILTER_OUT':\n case 'ADD_LINE_FILTER_OUT': {\n expression = addLineFilter(expression, action.options?.value, '!=');\n break;\n }\n default:\n break;\n }\n return { ...query, expr: expression };\n }\n\n /**\n * Implemented as part of `DataSourceWithQueryModificationSupport`. 
Returns a list of operation\n * types that are supported by `modifyQuery()`.\n */\n getSupportedQueryModifications() {\n return [\n 'ADD_FILTER',\n 'ADD_FILTER_OUT',\n 'ADD_LOGFMT_PARSER',\n 'ADD_JSON_PARSER',\n 'ADD_UNPACK_PARSER',\n 'ADD_NO_PIPELINE_ERROR',\n 'ADD_LEVEL_LABEL_FORMAT',\n 'ADD_LABEL_FILTER',\n 'ADD_STRING_FILTER',\n 'ADD_STRING_FILTER_OUT',\n ];\n }\n\n /**\n * Part of `DataSourceWithLogsContextSupport`, used to retrieve log context for a log row.\n * @returns A promise that resolves to an object containing the log context data as DataFrames.\n */\n getLogRowContext = async (\n row: LogRowModel,\n options?: LogRowContextOptions,\n origQuery?: DataQuery\n ): Promise<{ data: DataFrame[] }> => {\n return await this.logContextProvider.getLogRowContext(row, options, getLokiQueryFromDataQuery(origQuery));\n };\n /**\n * Part of `DataSourceWithLogsContextSupport`, used to retrieve the log context query for the provided log row and original query.\n * @returns A promise that resolves to a DataQuery representing the log context query.\n */\n getLogRowContextQuery = async (\n row: LogRowModel,\n options?: LogRowContextOptions,\n origQuery?: DataQuery,\n cacheFilters?: boolean\n ): Promise => {\n return await this.logContextProvider.getLogRowContextQuery(\n row,\n options,\n getLokiQueryFromDataQuery(origQuery),\n cacheFilters\n );\n };\n\n /**\n * Part of `DataSourceWithLogsContextSupport`, used to retrieve the log context UI for the provided log row and original query.\n * @returns A React component or element representing the log context UI for the log row.\n */\n getLogRowContextUi(row: LogRowModel, runContextQuery: () => void, origQuery: DataQuery): React.ReactNode {\n return this.logContextProvider.getLogRowContextUi(row, runContextQuery, getLokiQueryFromDataQuery(origQuery));\n }\n\n /**\n * Implemented as part of the DataSourceAPI. It allows the datasource to serve as a source of annotations for a dashboard.\n * @returns A promise that resolves to an array of AnnotationEvent objects representing the annotations for the dashboard.\n * @todo This is deprecated and it is recommended to use the `AnnotationSupport` feature for annotations.\n */\n async annotationQuery(options: any): Promise {\n const { expr, maxLines, instant, tagKeys = '', titleFormat = '', textFormat = '' } = options.annotation;\n\n if (!expr) {\n return [];\n }\n\n const id = `${REF_ID_STARTER_ANNOTATION}${options.annotation.name}`;\n\n const query: LokiQuery = {\n refId: id,\n expr,\n maxLines,\n instant,\n queryType: instant ? 
LokiQueryType.Instant : LokiQueryType.Range,\n };\n\n const request = makeRequest(query, options.range, CoreApp.Dashboard, id);\n\n const { data } = await lastValueFrom(this.query(request));\n\n const annotations: AnnotationEvent[] = [];\n const splitKeys: string[] = tagKeys.split(',').filter((v: string) => v !== '');\n\n for (const frame of data) {\n const view = new DataFrameView<{ Time: string; Line: string; labels: Labels }>(frame);\n\n view.forEach((row) => {\n const { labels } = row;\n\n const maybeDuplicatedTags = Object.entries(labels)\n .map(([key, val]) => [key, val.trim()]) // trim all label-values\n .filter(([key, val]) => {\n if (val === '') {\n // remove empty\n return false;\n }\n\n // if tags are specified, remove the label if it does not match the tags\n if (splitKeys.length && !splitKeys.includes(key)) {\n return false;\n }\n\n return true;\n })\n .map(([key, val]) => val); // keep only the label-value\n\n // remove duplicates\n const tags = Array.from(new Set(maybeDuplicatedTags));\n\n annotations.push({\n time: new Date(row.Time).valueOf(),\n title: renderLegendFormat(titleFormat, labels),\n text: renderLegendFormat(textFormat, labels) || row.Line,\n tags,\n });\n });\n }\n\n return annotations;\n }\n\n /**\n * Adds ad hoc filters to a query expression, handling proper escaping of filter values.\n * @returns The query expression with ad hoc filters and correctly escaped values.\n * @todo this.templateSrv.getAdhocFilters() is deprecated\n */\n addAdHocFilters(queryExpr: string, adhocFilters?: AdHocVariableFilter[]) {\n if (!adhocFilters) {\n return queryExpr;\n }\n\n let expr = replaceVariables(queryExpr);\n\n expr = adhocFilters.reduce((acc: string, filter: { key: string; operator: string; value: string }) => {\n const { key, operator } = filter;\n let { value } = filter;\n if (isRegexSelector(operator)) {\n // Adhoc filters don't support multiselect, therefore if the user selects a regex operator\n // we are going to consider the value to be a regex filter and use lokiRegularEscape,\n // which does not escape regex special characters (e.g. .*test.* => .*test.*)\n value = lokiRegularEscape(value);\n } else {\n // Otherwise, we want to escape special characters in the value\n value = escapeLabelValueInSelector(value, operator);\n }\n return addLabelToQuery(acc, key, operator, value);\n }, expr);\n\n return returnVariables(expr);\n }\n\n /**\n * Filters out queries that are empty or hidden. Used when running queries through the backend.\n * It is called from DatasourceWithBackend.\n * @returns `true` if the query is not hidden and its expression is not empty; `false` otherwise.\n */\n filterQuery(query: LokiQuery): boolean {\n if (query.hide || query.expr === '') {\n return false;\n }\n return true;\n }\n\n /**\n * Applies template variables and ad hoc filters to a query. Used when running queries through the backend.\n * It is called from DatasourceWithBackend.\n * @returns A modified Loki query with template variables and ad hoc filters applied.\n */\n applyTemplateVariables(target: LokiQuery, scopedVars: ScopedVars, adhocFilters?: AdHocVariableFilter[]): LokiQuery {\n // We want to interpolate these variables on the backend because we support using them in\n // alerting/ML queries and we want to have consistent interpolation for all queries\n const { __auto, __interval, __interval_ms, __range, __range_s, __range_ms, ...rest } = scopedVars || {};\n\n const exprWithAdHoc = this.addAdHocFilters(target.expr, adhocFilters);\n\n const variables = {\n ...rest,\n\n // pass through for backend interpolation. 
Need to be in scopedVars for Scenes though\n __interval: {\n value: '$__interval',\n },\n __interval_ms: {\n value: '$__interval_ms',\n },\n };\n return {\n ...target,\n legendFormat: this.templateSrv.replace(target.legendFormat, rest),\n expr: this.templateSrv.replace(exprWithAdHoc, variables, this.interpolateQueryExpr),\n };\n }\n\n /**\n * Interpolates template variables in a given string. Template variables are passed through scopedVars.\n * @returns The string with template variables replaced by their values.\n */\n interpolateString(string: string, scopedVars?: ScopedVars) {\n return this.templateSrv.replace(string, scopedVars, this.interpolateQueryExpr);\n }\n\n /**\n * Retrieves and returns a list of variable names used in the template service.\n * Used, for example, in the Query Builder to populate the variable dropdown with template variables.\n * @returns An array of variable names, each prefixed with '$'.\n */\n getVariables(): string[] {\n return this.templateSrv.getVariables().map((v) => `$${v.name}`);\n }\n /**\n * Retrieves query hints for query improvements based on a Loki query and its result data.\n * Used in the Query Builder to provide hints for query improvements, such as adding a parser, etc.\n * @returns An array of query hints for potential query improvements.\n */\n getQueryHints(query: LokiQuery, result: DataFrame[]): QueryHint[] {\n return getQueryHints(query.expr, result);\n }\n\n /**\n * Get a default LokiQuery based on the specified app. Currently used in UnifiedAlerting.\n * @returns A default LokiQuery object with appropriate settings for the given application.\n */\n getDefaultQuery(app: CoreApp): LokiQuery {\n const defaults = { refId: 'A', expr: '' };\n\n if (app === CoreApp.UnifiedAlerting) {\n return {\n ...defaults,\n queryType: LokiQueryType.Instant,\n };\n }\n\n return {\n ...defaults,\n queryType: LokiQueryType.Range,\n };\n }\n}\n\n// NOTE: these two functions are very similar to the escapeLabelValueIn* functions\n// in language_utils.ts, but they are not exactly the same algorithm, and we found\n// no way to reuse one in the other or vice versa.\nexport function lokiRegularEscape(value: any) {\n if (typeof value === 'string') {\n return value.replace(/'/g, \"\\\\\\\\'\");\n }\n return value;\n}\n\nexport function lokiSpecialRegexEscape(value: any) {\n if (typeof value === 'string') {\n return lokiRegularEscape(value.replace(/\\\\/g, '\\\\\\\\\\\\\\\\').replace(/[$^*{}\\[\\]+?.()|]/g, '\\\\\\\\$&'));\n }\n return value;\n}\n","import { NodeType, SyntaxNode } from '@lezer/common';\nimport { sortBy } from 'lodash';\n\nimport { QueryBuilderLabelFilter } from '@grafana/experimental';\nimport {\n Identifier,\n LabelFilter,\n LabelParser,\n LineComment,\n LineFilters,\n LogExpr,\n LogRangeExpr,\n Matcher,\n parser,\n PipelineExpr,\n Selector,\n UnwrapExpr,\n String,\n PipelineStage,\n LogfmtParser,\n JsonExpressionParser,\n LogfmtExpressionParser,\n Expr,\n} from '@grafana/lezer-logql';\n\nimport { unescapeLabelValue } from './languageUtils';\nimport { getNodePositionsFromQuery } from './queryUtils';\nimport { lokiQueryModeller as modeller } from './querybuilder/LokiQueryModeller';\nimport { buildVisualQueryFromString, handleQuotes } from './querybuilder/parsing';\nimport { LabelType } from './types';\n\nexport class NodePosition {\n from: number;\n to: number;\n type?: NodeType;\n\n constructor(from: number, to: number, type?: NodeType) {\n this.from = from;\n this.to = to;\n this.type = type;\n }\n\n static fromNode(node: SyntaxNode): NodePosition {\n 
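// convenience factory: copies the node's span (from/to) and node type into a NodePosition\n 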
return new NodePosition(node.from, node.to, node.type);\n }\n\n contains(position: NodePosition): boolean {\n return this.from <= position.from && this.to >= position.to;\n }\n\n getExpression(query: string): string {\n return query.substring(this.from, this.to);\n }\n}\n\n/**\n * Checks for the presence of a given label=value filter in any Matcher expression in the query.\n */\nexport function queryHasFilter(query: string, key: string, operator: string, value: string): boolean {\n const matchers = getMatchersWithFilter(query, key, operator, value);\n return matchers.length > 0;\n}\n\n/**\n * Removes a label=value Matcher expression from the query.\n */\nexport function removeLabelFromQuery(query: string, key: string, operator: string, value: string): string {\n const matchers = getMatchersWithFilter(query, key, operator, value);\n for (const matcher of matchers) {\n query =\n matcher.parent?.type.id === LabelFilter ? removeLabelFilter(query, matcher) : removeSelector(query, matcher);\n }\n return query;\n}\n\nfunction removeLabelFilter(query: string, matcher: SyntaxNode): string {\n const pipelineStage = matcher.parent?.parent;\n if (!pipelineStage || pipelineStage.type.id !== PipelineStage) {\n return query;\n }\n return (query.substring(0, pipelineStage.from) + query.substring(pipelineStage.to)).trim();\n}\n\nfunction removeSelector(query: string, matcher: SyntaxNode): string {\n let selector: SyntaxNode | null = matcher;\n do {\n selector = selector.parent;\n } while (selector && selector.type.id !== Selector);\n const label = matcher.getChild(Identifier);\n if (!selector || !label) {\n return query;\n }\n const labelName = query.substring(label.from, label.to);\n\n const prefix = query.substring(0, selector.from);\n const suffix = query.substring(selector.to);\n\n const matchVisQuery = buildVisualQueryFromString(query.substring(selector.from, selector.to));\n matchVisQuery.query.labels = matchVisQuery.query.labels.filter((label) => label.label !== labelName);\n\n return prefix + modeller.renderQuery(matchVisQuery.query) + suffix;\n}\n\nfunction getMatchersWithFilter(query: string, label: string, operator: string, value: string): SyntaxNode[] {\n const tree = parser.parse(query);\n const matchers: SyntaxNode[] = [];\n tree.iterate({\n enter: ({ type, node }): void => {\n if (type.id === Matcher) {\n matchers.push(node);\n }\n },\n });\n return matchers.filter((matcher) => {\n const labelNode = matcher.getChild(Identifier);\n const opNode = labelNode?.nextSibling;\n const valueNode = matcher.getChild(String);\n if (!labelNode || !opNode || !valueNode) {\n return false;\n }\n const labelName = query.substring(labelNode.from, labelNode.to);\n if (labelName !== label) {\n return false;\n }\n const labelValue = query.substring(valueNode.from, valueNode.to);\n if (handleQuotes(labelValue) !== unescapeLabelValue(value)) {\n return false;\n }\n const labelOperator = query.substring(opNode.from, opNode.to);\n if (labelOperator !== operator) {\n return false;\n }\n return true;\n });\n}\n\n/**\n * Adds label filter to existing query. Useful for query modification for example for ad hoc filters.\n *\n * It uses LogQL parser to find instances of labels, alters them and then splices them back into the query.\n * In a case when we have parser, instead of adding new instance of label it adds label filter after the parser.\n *\n * This operates on substrings of the query with labels and operates just on those. 
This makes this\n * more robust and can alter even invalid queries, and preserves in general the query structure and whitespace.\n */\nexport function addLabelToQuery(\n query: string,\n key: string,\n operator: string,\n value: string,\n labelType?: LabelType | null\n): string {\n if (!key || !value) {\n throw new Error('Need label to add to query.');\n }\n\n const streamSelectorPositions = getStreamSelectorPositions(query);\n if (!streamSelectorPositions.length) {\n return query;\n }\n\n const parserPositions = getParserPositions(query);\n const labelFilterPositions = getLabelFilterPositions(query);\n const hasStreamSelectorMatchers = getMatcherInStreamPositions(query);\n const everyStreamSelectorHasMatcher = streamSelectorPositions.every((streamSelectorPosition) =>\n hasStreamSelectorMatchers.some(\n (matcherPosition) =>\n matcherPosition.from >= streamSelectorPosition.from && matcherPosition.to <= streamSelectorPosition.to\n )\n );\n\n const filter = toLabelFilter(key, value, operator);\n if (labelType === LabelType.Parsed || labelType === LabelType.StructuredMetadata) {\n const lastPositionsPerExpression = getLastPositionPerExpression(query, [\n ...streamSelectorPositions,\n ...labelFilterPositions,\n ...parserPositions,\n ]);\n\n return addFilterAsLabelFilter(query, lastPositionsPerExpression, filter);\n } else if (labelType === LabelType.Indexed) {\n return addFilterToStreamSelector(query, streamSelectorPositions, filter);\n } else {\n // labelType is not set, so we need to figure out where to add the label\n // if we don't have a parser, or have empty stream selectors, we will just add it to the stream selector\n if (parserPositions.length === 0 || everyStreamSelectorHasMatcher === false) {\n return addFilterToStreamSelector(query, streamSelectorPositions, filter);\n } else {\n // If `labelType` is not set, it indicates a potential metric query (`labelType` is present only in log queries that came from a Loki instance supporting the `categorize-labels` API). In case we are not adding the label to stream selectors we need to find the last position to add in each expression.\n // E.g. in `sum(rate({foo=\"bar\"} | logfmt [$__auto])) / sum(rate({foo=\"baz\"} | logfmt [$__auto]))` we need to add the label at two places.\n const lastPositionsPerExpression = getLastPositionPerExpression(query, [\n ...parserPositions,\n ...labelFilterPositions,\n ]);\n\n return addFilterAsLabelFilter(query, lastPositionsPerExpression, filter);\n }\n }\n}\n\nfunction getLastPositionPerExpression(query: string, positions: NodePosition[]): NodePosition[] {\n const subExpressions = findLeaves(getNodePositionsFromQuery(query, [Expr]));\n const subPositions = [...positions];\n\n // find last position for each subexpression\n const lastPositionsPerExpression = subExpressions.map((subExpression) => {\n return findLastPosition(\n subPositions.filter((p) => {\n return subExpression.contains(p);\n })\n );\n });\n return lastPositionsPerExpression;\n}\n\n/**\n * Adds parser to existing query. 
Useful for query modification for hints.\n * It uses LogQL parser to find instances of stream selectors or line filters and adds parser after them.\n *\n * @param query\n * @param parser\n */\nexport function addParserToQuery(query: string, parser: string): string {\n const lineFilterPositions = getLineFiltersPositions(query);\n\n if (lineFilterPositions.length) {\n return addParser(query, lineFilterPositions, parser);\n } else {\n const streamSelectorPositions = getStreamSelectorPositions(query);\n if (!streamSelectorPositions.length) {\n return query;\n }\n return addParser(query, streamSelectorPositions, parser);\n }\n}\n\n/**\n * Adds filtering for pipeline errors to existing query. Useful for query modification for hints.\n * It uses LogQL parser to find parsers and adds pipeline errors filtering after them.\n *\n * @param query\n */\nexport function addNoPipelineErrorToQuery(query: string): string {\n const parserPositions = getParserPositions(query);\n if (!parserPositions.length) {\n return query;\n }\n\n const filter = toLabelFilter('__error__', '', '=');\n return addFilterAsLabelFilter(query, parserPositions, filter);\n}\n\n/**\n * Adds label format to existing query. Useful for query modification for hints.\n * It uses LogQL parser to find log query and add label format at the end.\n *\n * @param query\n * @param labelFormat\n */\nexport function addLabelFormatToQuery(query: string, labelFormat: { originalLabel: string; renameTo: string }): string {\n const logQueryPositions = getLogQueryPositions(query);\n return addLabelFormat(query, logQueryPositions, labelFormat);\n}\n\n/**\n * Removes all comments from query.\n * It uses LogQL parser to find all LineComments and removes them.\n */\nexport function removeCommentsFromQuery(query: string): string {\n const lineCommentPositions = getLineCommentPositions(query);\n\n if (!lineCommentPositions.length) {\n return query;\n }\n\n let newQuery = '';\n let prev = 0;\n\n for (let lineCommentPosition of lineCommentPositions) {\n newQuery = newQuery + query.substring(prev, lineCommentPosition.from);\n prev = lineCommentPosition.to;\n }\n newQuery = newQuery + query.substring(prev);\n return newQuery;\n}\n\n/**\n * Parse the string and get all Selector positions in the query together with parsed representation of the\n * selector.\n * @param query\n */\nexport function getStreamSelectorPositions(query: string): NodePosition[] {\n const tree = parser.parse(query);\n const positions: NodePosition[] = [];\n tree.iterate({\n enter: ({ type, node }): false | void => {\n if (type.id === Selector) {\n positions.push(NodePosition.fromNode(node));\n return false;\n }\n },\n });\n return positions;\n}\n\n/**\n * Parse the string and get all LabelParser positions in the query.\n * @param query\n */\nexport function getParserPositions(query: string): NodePosition[] {\n const tree = parser.parse(query);\n const positions: NodePosition[] = [];\n const parserNodeTypes = [LabelParser, JsonExpressionParser, LogfmtParser, LogfmtExpressionParser];\n tree.iterate({\n enter: ({ type, node }): false | void => {\n if (parserNodeTypes.includes(type.id)) {\n positions.push(NodePosition.fromNode(node));\n return false;\n }\n },\n });\n return positions;\n}\n\n/**\n * Parse the string and get all LabelFilter positions in the query.\n * @param query\n */\nexport function getLabelFilterPositions(query: string): NodePosition[] {\n const tree = parser.parse(query);\n const positions: NodePosition[] = [];\n tree.iterate({\n enter: ({ type, node }): false | void => {\n if 
(type.id === LabelFilter) {\n positions.push(NodePosition.fromNode(node));\n return false;\n }\n },\n });\n return positions;\n}\n\n/**\n * Parse the string and get all Line filter positions in the query.\n * @param query\n */\nfunction getLineFiltersPositions(query: string): NodePosition[] {\n const tree = parser.parse(query);\n const positions: NodePosition[] = [];\n tree.iterate({\n enter: ({ type, node }): false | void => {\n if (type.id === LineFilters) {\n positions.push(NodePosition.fromNode(node));\n return false;\n }\n },\n });\n return positions;\n}\n\n/**\n * Parse the string and get all Log query positions in the query.\n * @param query\n */\nfunction getLogQueryPositions(query: string): NodePosition[] {\n const tree = parser.parse(query);\n const positions: NodePosition[] = [];\n tree.iterate({\n enter: ({ type, node }): false | void => {\n if (type.id === LogExpr) {\n positions.push(NodePosition.fromNode(node));\n return false;\n }\n\n // This is a case in metrics query\n if (type.id === LogRangeExpr) {\n // Unfortunately, LogRangeExpr includes both log and non-log (e.g. Duration/Range/...) parts of query.\n // We get position of all log-parts within LogRangeExpr: Selector, PipelineExpr and UnwrapExpr.\n const logPartsPositions: NodePosition[] = [];\n const selector = node.getChild(Selector);\n if (selector) {\n logPartsPositions.push(NodePosition.fromNode(selector));\n }\n\n const pipeline = node.getChild(PipelineExpr);\n if (pipeline) {\n logPartsPositions.push(NodePosition.fromNode(pipeline));\n }\n\n const unwrap = node.getChild(UnwrapExpr);\n if (unwrap) {\n logPartsPositions.push(NodePosition.fromNode(unwrap));\n }\n\n // We sort them and then pick \"from\" from first position and \"to\" from last position.\n const sorted = sortBy(logPartsPositions, (position) => position.to);\n positions.push(new NodePosition(sorted[0].from, sorted[sorted.length - 1].to));\n return false;\n }\n },\n });\n return positions;\n}\n\nexport function toLabelFilter(key: string, value: string, operator: string): QueryBuilderLabelFilter {\n // We need to make sure that we convert the value back to string because it may be a number\n return { label: key, op: operator, value };\n}\n\n/**\n * Add filter as to stream selectors\n * @param query\n * @param vectorSelectorPositions\n * @param filter\n */\nfunction addFilterToStreamSelector(\n query: string,\n vectorSelectorPositions: NodePosition[],\n filter: QueryBuilderLabelFilter\n): string {\n let newQuery = '';\n let prev = 0;\n\n for (let i = 0; i < vectorSelectorPositions.length; i++) {\n // This is basically just doing splice on a string for each matched vector selector.\n const match = vectorSelectorPositions[i];\n const isLast = i === vectorSelectorPositions.length - 1;\n\n const start = query.substring(prev, match.from);\n const end = isLast ? 
query.substring(match.to) : '';\n const matchVisQuery = buildVisualQueryFromString(query.substring(match.from, match.to));\n\n if (!labelExists(matchVisQuery.query.labels, filter)) {\n // We don't want to add duplicate labels.\n matchVisQuery.query.labels.push(filter);\n }\n const newLabels = modeller.renderQuery(matchVisQuery.query);\n newQuery += start + newLabels + end;\n prev = match.to;\n }\n return newQuery;\n}\n\n/**\n * Add filter as label filter after the parsers\n * @param query\n * @param positionsToAddAfter\n * @param filter\n */\nexport function addFilterAsLabelFilter(\n query: string,\n positionsToAddAfter: NodePosition[],\n filter: QueryBuilderLabelFilter\n): string {\n let newQuery = '';\n let prev = 0;\n\n for (let i = 0; i < positionsToAddAfter.length; i++) {\n // This is basically just doing splice on a string for each matched position.\n const match = positionsToAddAfter[i];\n const isLast = i === positionsToAddAfter.length - 1;\n\n const start = query.substring(prev, match.to);\n const end = isLast ? query.substring(match.to) : '';\n\n let labelFilter = '';\n // For < and >, if the value is a number, we don't add quotes around it and use it as a number\n if (!Number.isNaN(Number(filter.value)) && (filter.op === '<' || filter.op === '>')) {\n labelFilter = ` | ${filter.label}${filter.op}${Number(filter.value)}`;\n } else {\n // we now unescape all escaped values again, because we are using backticks which can handle those cases.\n // we also don't care about the operator here, because we need to unescape for both regex and equal.\n labelFilter = ` | ${filter.label}${filter.op}\\`${unescapeLabelValue(filter.value)}\\``;\n }\n\n newQuery += start + labelFilter + end;\n prev = match.to;\n }\n return newQuery;\n}\n\n/**\n * Add parser after line filter or stream selector\n * @param query\n * @param queryPartPositions\n * @param parser\n */\nfunction addParser(query: string, queryPartPositions: NodePosition[], parser: string): string {\n let newQuery = '';\n let prev = 0;\n\n for (let i = 0; i < queryPartPositions.length; i++) {\n // Splice on a string for each matched position\n const match = queryPartPositions[i];\n const isLast = i === queryPartPositions.length - 1;\n\n const start = query.substring(prev, match.to);\n const end = isLast ? query.substring(match.to) : '';\n\n // Add parser\n newQuery += start + ` | ${parser}` + end;\n prev = match.to;\n }\n return newQuery;\n}\n\n/**\n * Add label format after the log query\n * @param query\n * @param logQueryPositions\n * @param labelFormat\n */\nfunction addLabelFormat(\n query: string,\n logQueryPositions: NodePosition[],\n labelFormat: { originalLabel: string; renameTo: string }\n): string {\n let newQuery = '';\n let prev = 0;\n\n for (let i = 0; i < logQueryPositions.length; i++) {\n // This is basically just doing splice on a string for each matched log query position.\n const match = logQueryPositions[i];\n const isLast = i === logQueryPositions.length - 1;\n\n const start = query.substring(prev, match.to);\n const end = isLast ? 
query.substring(match.to) : '';\n\n const labelFilter = ` | label_format ${labelFormat.renameTo}=${labelFormat.originalLabel}`;\n newQuery += start + labelFilter + end;\n prev = match.to;\n }\n return newQuery;\n}\n\nexport function addLineFilter(query: string, value = '', operator = '|='): string {\n const streamSelectorPositions = getStreamSelectorPositions(query);\n if (!streamSelectorPositions.length) {\n return query;\n }\n const streamSelectorEnd = streamSelectorPositions[0].to;\n\n const newQueryExpr = query.slice(0, streamSelectorEnd) + ` ${operator} \\`${value}\\`` + query.slice(streamSelectorEnd);\n return newQueryExpr;\n}\n\nfunction getLineCommentPositions(query: string): NodePosition[] {\n const tree = parser.parse(query);\n const positions: NodePosition[] = [];\n tree.iterate({\n enter: ({ type, from, to }): false | void => {\n if (type.id === LineComment) {\n positions.push(new NodePosition(from, to, type));\n return false;\n }\n },\n });\n return positions;\n}\n\n/**\n * Check if label exists in the list of labels but ignore the operator.\n * @param labels\n * @param filter\n */\nfunction labelExists(labels: QueryBuilderLabelFilter[], filter: QueryBuilderLabelFilter) {\n return labels.find((label) => label.label === filter.label && label.value === filter.value);\n}\n\n/**\n * Return the last position based on \"to\" property\n * @param positions\n */\nexport function findLastPosition(positions: NodePosition[]): NodePosition {\n if (!positions.length) {\n return new NodePosition(0, 0);\n }\n return positions.reduce((prev, current) => (prev.to > current.to ? prev : current));\n}\n\n/**\n * Gets all leaves of the nodes given. Leaves are nodes that don't contain any other nodes.\n *\n * @param {NodePosition[]} nodes\n * @return\n */\nfunction findLeaves(nodes: NodePosition[]): NodePosition[] {\n return nodes.filter((node) => nodes.every((n) => node.contains(n) === false || node === n));\n}\n\nfunction getAllPositionsInNodeByType(node: SyntaxNode, type: number): NodePosition[] {\n if (node.type.id === type) {\n return [NodePosition.fromNode(node)];\n }\n\n const positions: NodePosition[] = [];\n let pos = 0;\n let child = node.childAfter(pos);\n while (child) {\n positions.push(...getAllPositionsInNodeByType(child, type));\n pos = child.to;\n child = node.childAfter(pos);\n }\n return positions;\n}\n\nfunction getMatcherInStreamPositions(query: string): NodePosition[] {\n const tree = parser.parse(query);\n const positions: NodePosition[] = [];\n tree.iterate({\n enter: ({ node }): false | void => {\n if (node.type.id === Selector) {\n positions.push(...getAllPositionsInNodeByType(node, Matcher));\n }\n },\n });\n return positions;\n}\n","import { SyntaxNode } from '@lezer/common';\nimport { escapeRegExp } from 'lodash';\n\nimport {\n parser,\n LineFilter,\n PipeExact,\n PipeMatch,\n Filter,\n String,\n LabelFormatExpr,\n Selector,\n PipelineExpr,\n LabelParser,\n JsonExpressionParser,\n LabelFilter,\n MetricExpr,\n Matcher,\n Identifier,\n Range,\n formatLokiQuery,\n Logfmt,\n Json,\n OrFilter,\n FilterOp,\n} from '@grafana/lezer-logql';\nimport { reportInteraction } from '@grafana/runtime';\nimport { DataQuery } from '@grafana/schema';\n\nimport { placeHolderScopedVars } from './components/monaco-query-field/monaco-completion-provider/validation';\nimport { LokiDatasource } from './datasource';\nimport { getStreamSelectorPositions, NodePosition } from './modifyQuery';\nimport { ErrorId, replaceVariables, returnVariables } from './querybuilder/parsingUtils';\nimport { 
LokiQuery, LokiQueryType } from './types';\n\n/**\n * Returns search terms from a LogQL query.\n * E.g., `{} |= foo |=bar != baz` returns `['foo', 'bar']`.\n */\nexport function getHighlighterExpressionsFromQuery(input = ''): string[] {\n const results = [];\n\n const filters = getNodesFromQuery(input, [LineFilter]);\n\n for (const filter of filters) {\n const pipeExact = filter.getChild(Filter)?.getChild(PipeExact);\n const pipeMatch = filter.getChild(Filter)?.getChild(PipeMatch);\n const strings = getStringsFromLineFilter(filter);\n\n if ((!pipeExact && !pipeMatch) || !strings.length) {\n continue;\n }\n\n for (const string of strings) {\n const filterTerm = input.substring(string.from, string.to).trim();\n const backtickedTerm = filterTerm[0] === '`';\n const unwrappedFilterTerm = filterTerm.substring(1, filterTerm.length - 1);\n\n if (!unwrappedFilterTerm) {\n continue;\n }\n\n let resultTerm = '';\n\n // Only filter expressions with |~ operator are treated as regular expressions\n if (pipeMatch) {\n // When using backticks, Loki doesn't require to escape special characters and we can just push regular expression to highlights array\n // When using quotes, we have extra backslash escaping and we need to replace \\\\ with \\\n resultTerm = backtickedTerm ? unwrappedFilterTerm : unwrappedFilterTerm.replace(/\\\\\\\\/g, '\\\\');\n } else {\n // We need to escape this string so it is not matched as regular expression\n resultTerm = escapeRegExp(unwrappedFilterTerm);\n }\n\n if (resultTerm) {\n results.push(resultTerm);\n }\n }\n }\n return results;\n}\n\nexport function getStringsFromLineFilter(filter: SyntaxNode): SyntaxNode[] {\n const nodes: SyntaxNode[] = [];\n let node: SyntaxNode | null = filter;\n do {\n const string = node.getChild(String);\n if (string && !node.getChild(FilterOp)) {\n nodes.push(string);\n }\n node = node.getChild(OrFilter);\n } while (node != null);\n\n return nodes;\n}\n\nexport function getNormalizedLokiQuery(query: LokiQuery): LokiQuery {\n const queryType = getLokiQueryType(query);\n // instant and range are deprecated, we want to remove them\n const { instant, range, ...rest } = query;\n return { ...rest, queryType };\n}\n\nexport function getLokiQueryType(query: LokiQuery): LokiQueryType {\n // we are migrating from `.instant` and `.range` to `.queryType`\n // this function returns the correct query type\n const { queryType } = query;\n const hasValidQueryType =\n queryType === LokiQueryType.Range || queryType === LokiQueryType.Instant || queryType === LokiQueryType.Stream;\n\n // if queryType exists, it is respected\n if (hasValidQueryType) {\n return queryType;\n }\n\n // if no queryType, and instant===true, it's instant\n if (query.instant === true) {\n return LokiQueryType.Instant;\n }\n\n // otherwise it is range\n return LokiQueryType.Range;\n}\n\nconst tagsToObscure = ['String', 'Identifier', 'LineComment', 'Number'];\nconst partsToKeep = ['__error__', '__interval', '__interval_ms', '__auto'];\nexport function obfuscate(query: string): string {\n let obfuscatedQuery: string = query;\n const tree = parser.parse(query);\n tree.iterate({\n enter: ({ name, from, to }): false | void => {\n const queryPart = query.substring(from, to);\n if (tagsToObscure.includes(name) && !partsToKeep.includes(queryPart)) {\n obfuscatedQuery = obfuscatedQuery.replace(queryPart, name);\n }\n },\n });\n return obfuscatedQuery;\n}\n\nexport function parseToNodeNamesArray(query: string): string[] {\n const queryParts: string[] = [];\n const tree = parser.parse(query);\n 
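// walk the parse tree and record the name of every node visited, in visit order\n 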
tree.iterate({\n enter: ({ name }): false | void => {\n queryParts.push(name);\n },\n });\n return queryParts;\n}\n\nexport function isQueryWithNode(query: string, nodeType: number): boolean {\n let isQueryWithNode = false;\n const tree = parser.parse(query);\n tree.iterate({\n enter: ({ type }): false | void => {\n if (type.id === nodeType) {\n isQueryWithNode = true;\n return false;\n }\n },\n });\n return isQueryWithNode;\n}\n\nexport function getNodesFromQuery(query: string, nodeTypes?: number[]): SyntaxNode[] {\n const nodes: SyntaxNode[] = [];\n const tree = parser.parse(query);\n tree.iterate({\n enter: (node): false | void => {\n if (nodeTypes === undefined || nodeTypes.includes(node.type.id)) {\n nodes.push(node.node);\n }\n },\n });\n return nodes;\n}\n\nexport function getNodePositionsFromQuery(query: string, nodeTypes?: number[]): NodePosition[] {\n const positions: NodePosition[] = [];\n const tree = parser.parse(query);\n tree.iterate({\n enter: (node): false | void => {\n if (nodeTypes === undefined || nodeTypes.includes(node.type.id)) {\n positions.push(NodePosition.fromNode(node.node));\n }\n },\n });\n return positions;\n}\n\nexport function getNodeFromQuery(query: string, nodeType: number): SyntaxNode | undefined {\n const nodes = getNodesFromQuery(query, [nodeType]);\n return nodes.length > 0 ? nodes[0] : undefined;\n}\n\n/**\n * Parses the query and looks for error nodes. If there is at least one, it returns true.\n * Grafana variables are considered errors, so if you need to validate a query\n * with variables you should interpolate it first.\n */\nexport function isQueryWithError(query: string): boolean {\n return isQueryWithNode(query, ErrorId);\n}\n\nexport function isLogsQuery(query: string): boolean {\n return !isQueryWithNode(query, MetricExpr);\n}\n\nexport function isQueryWithParser(query: string): { queryWithParser: boolean; parserCount: number } {\n const nodes = getNodesFromQuery(query, [LabelParser, JsonExpressionParser, Logfmt]);\n const parserCount = nodes.length;\n return { queryWithParser: parserCount > 0, parserCount };\n}\n\nexport function getParserFromQuery(query: string): string | undefined {\n const parsers = getNodesFromQuery(query, [LabelParser, Json, Logfmt]);\n return parsers.length > 0 ? query.substring(parsers[0].from, parsers[0].to).trim() : undefined;\n}\n\nexport function isQueryPipelineErrorFiltering(query: string): boolean {\n const labels = getNodesFromQuery(query, [LabelFilter]);\n for (const node of labels) {\n const label = node.getChild(Matcher)?.getChild(Identifier);\n if (label) {\n const labelName = query.substring(label.from, label.to);\n if (labelName === '__error__') {\n return true;\n }\n }\n }\n return false;\n}\n\nexport function isQueryWithLabelFormat(query: string): boolean {\n return isQueryWithNode(query, LabelFormatExpr);\n}\n\nexport function getLogQueryFromMetricsQuery(query: string): string {\n if (isLogsQuery(query)) {\n return query;\n }\n\n // Log query in metrics query composes of Selector & PipelineExpr\n const selectorNode = getNodeFromQuery(query, Selector);\n if (!selectorNode) {\n return '';\n }\n const selector = query.substring(selectorNode.from, selectorNode.to);\n\n const pipelineExprNode = getNodeFromQuery(query, PipelineExpr);\n const pipelineExpr = pipelineExprNode ? 
query.substring(pipelineExprNode.from, pipelineExprNode.to) : '';\n\n return `${selector} ${pipelineExpr}`.trim();\n}\n\nexport function getLogQueryFromMetricsQueryAtPosition(query: string, position: number): string {\n if (isLogsQuery(query)) {\n return query;\n }\n\n const metricQuery = getNodesFromQuery(query, [MetricExpr])\n .reverse() // So we don't get the root metric node\n .find((node) => node.from <= position && node.to >= position);\n if (!metricQuery) {\n return '';\n }\n return getLogQueryFromMetricsQuery(query.substring(metricQuery.from, metricQuery.to));\n}\n\nexport function isQueryWithLabelFilter(query: string): boolean {\n return isQueryWithNode(query, LabelFilter);\n}\n\nexport function isQueryWithLineFilter(query: string): boolean {\n return isQueryWithNode(query, LineFilter);\n}\n\nexport function isQueryWithRangeVariable(query: string): boolean {\n const rangeNodes = getNodesFromQuery(query, [Range]);\n for (const node of rangeNodes) {\n if (query.substring(node.from, node.to).match(/\\[\\$__range(_s|_ms)?/)) {\n return true;\n }\n }\n return false;\n}\n\nexport function getStreamSelectorsFromQuery(query: string): string[] {\n const labelMatcherPositions = getStreamSelectorPositions(query);\n\n const labelMatchers = labelMatcherPositions.map((labelMatcher) => {\n return query.slice(labelMatcher.from, labelMatcher.to);\n });\n\n return labelMatchers;\n}\n\nexport function requestSupportsSplitting(allQueries: LokiQuery[]) {\n const queries = allQueries\n .filter((query) => !query.hide)\n .filter((query) => !query.refId.includes('do-not-chunk'))\n .filter((query) => query.expr);\n\n return queries.length > 0;\n}\n\nexport const isLokiQuery = (query: DataQuery): query is LokiQuery => {\n if (!query) {\n return false;\n }\n\n const lokiQuery = query as LokiQuery;\n return lokiQuery.expr !== undefined;\n};\n\nexport const getLokiQueryFromDataQuery = (query?: DataQuery): LokiQuery | undefined => {\n if (!query || !isLokiQuery(query)) {\n return undefined;\n }\n\n return query;\n};\n\nexport function formatLogqlQuery(query: string, datasource: LokiDatasource) {\n const isInvalid = isQueryWithError(datasource.interpolateString(query, placeHolderScopedVars));\n\n reportInteraction('grafana_loki_format_query_clicked', {\n is_invalid: isInvalid,\n query_type: isLogsQuery(query) ? 
'logs' : 'metric',\n });\n\n if (isInvalid) {\n return query;\n }\n\n let transformedQuery = replaceVariables(query);\n const transformationMatches = [];\n const tree = parser.parse(transformedQuery);\n\n // Variables are considered errors inside of the parser, so we need to remove them before formatting\n // We replace all variables with [0s] and keep track of the replaced variables\n // After formatting we replace [0s] with the original variable\n if (tree.topNode.firstChild?.firstChild?.type.id === MetricExpr) {\n const pattern = /\\[__V_[0-2]__\\w+__V__\\]/g;\n transformationMatches.push(...transformedQuery.matchAll(pattern));\n transformedQuery = transformedQuery.replace(pattern, '[0s]');\n }\n\n let formatted = formatLokiQuery(transformedQuery);\n\n if (tree.topNode.firstChild?.firstChild?.type.id === MetricExpr) {\n transformationMatches.forEach((match) => {\n formatted = formatted.replace('[0s]', match[0]);\n });\n }\n\n return returnVariables(formatted);\n}\n","import { Registry } from '@grafana/data';\nimport { BINARY_OPERATIONS_KEY } from './types.js';\n\nclass QueryModellerBase {\n constructor(operationDefinitions, innerQueryPlaceholder) {\n this.categories = [];\n this.operationsRegistry = new Registry(() => operationDefinitions);\n this.innerQueryPlaceholder = innerQueryPlaceholder || \"\";\n }\n setOperationCategories(categories) {\n this.categories = categories;\n }\n getOperationsForCategory(category) {\n return this.operationsRegistry.list().filter((op) => op.category === category && !op.hideFromList);\n }\n getAlternativeOperations(key) {\n return this.operationsRegistry.list().filter((op) => op.alternativesKey && op.alternativesKey === key);\n }\n getCategories() {\n return this.categories;\n }\n getOperationDefinition(id) {\n return this.operationsRegistry.getIfExists(id);\n }\n hasBinaryOp(query) {\n return query.operations.find((op) => {\n const def = this.getOperationDefinition(op.id);\n return (def == null ? 
void 0 : def.category) === BINARY_OPERATIONS_KEY;\n }) !== void 0;\n }\n}\n\nexport { QueryModellerBase };\n//# sourceMappingURL=QueryModellerBase.js.map\n","import {\n QueryModellerBase,\n QueryBuilderLabelFilter,\n VisualQuery,\n QueryBuilderOperation,\n VisualQueryBinary,\n} from '@grafana/experimental';\n\nimport { operationDefinitions } from './operations';\nimport { LokiOperationId, LokiQueryPattern, LokiQueryPatternType, LokiVisualQueryOperationCategory } from './types';\n\nexport class LokiQueryModeller extends QueryModellerBase {\n constructor() {\n super(operationDefinitions, '');\n\n this.setOperationCategories([\n LokiVisualQueryOperationCategory.Aggregations,\n LokiVisualQueryOperationCategory.RangeFunctions,\n LokiVisualQueryOperationCategory.Formats,\n LokiVisualQueryOperationCategory.BinaryOps,\n LokiVisualQueryOperationCategory.LabelFilters,\n LokiVisualQueryOperationCategory.LineFilters,\n ]);\n }\n\n renderOperations(queryString: string, operations: QueryBuilderOperation[]): string {\n for (const operation of operations) {\n const def = this.operationsRegistry.getIfExists(operation.id);\n if (!def) {\n console.error(`Could not find operation ${operation.id} in the registry`);\n continue;\n }\n queryString = def.renderer(operation, def, queryString);\n }\n return queryString;\n }\n\n renderBinaryQueries(queryString: string, binaryQueries?: Array>) {\n if (binaryQueries) {\n for (const binQuery of binaryQueries) {\n queryString = `${this.renderBinaryQuery(queryString, binQuery)}`;\n }\n }\n return queryString;\n }\n\n private renderBinaryQuery(leftOperand: string, binaryQuery: VisualQueryBinary) {\n let result = leftOperand + ` ${binaryQuery.operator} `;\n\n if (binaryQuery.vectorMatches) {\n result += `${binaryQuery.vectorMatchesType}(${binaryQuery.vectorMatches}) `;\n }\n\n return result + this.renderQuery(binaryQuery.query, true);\n }\n\n renderLabels(labels: QueryBuilderLabelFilter[]): string {\n if (labels.length === 0) {\n return '{}';\n }\n\n let expr = '{';\n for (const filter of labels) {\n if (expr !== '{') {\n expr += ', ';\n }\n\n expr += `${filter.label}${filter.op}\"${filter.value}\"`;\n }\n\n return expr + `}`;\n }\n\n renderQuery(query: VisualQuery, nested?: boolean): string {\n let queryString = this.renderLabels(query.labels);\n queryString = this.renderOperations(queryString, query.operations);\n\n if (!nested && this.hasBinaryOp(query) && Boolean(query.binaryQueries?.length)) {\n queryString = `(${queryString})`;\n }\n\n queryString = this.renderBinaryQueries(queryString, query.binaryQueries);\n\n return queryString;\n }\n\n getQueryPatterns(): LokiQueryPattern[] {\n return [\n {\n name: 'Parse log lines with logfmt parser',\n type: LokiQueryPatternType.Log,\n // {} | logfmt | __error__=``\n operations: [\n { id: LokiOperationId.Logfmt, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n ],\n },\n {\n name: 'Parse log lines with JSON parser',\n type: LokiQueryPatternType.Log,\n // {} | json | __error__=``\n operations: [\n { id: LokiOperationId.Json, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n ],\n },\n {\n name: 'Filter log line and parse with logfmt parser',\n type: LokiQueryPatternType.Log,\n // {} |= `` | logfmt | __error__=``\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.Logfmt, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n ],\n },\n {\n name: 'Filter log lines and parse with json parser',\n type: 
LokiQueryPatternType.Log,\n // {} |= `` | json | __error__=``\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.Json, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n ],\n },\n {\n name: 'Parse log line with logfmt parser and use label filter',\n type: LokiQueryPatternType.Log,\n // {} |= `` | logfmt | __error__=`` | label=`value`\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.Logfmt, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n { id: LokiOperationId.LabelFilter, params: ['label', '=', 'value'] },\n ],\n },\n {\n name: 'Parse log lines with nested json',\n type: LokiQueryPatternType.Log,\n // {} |= `` | json | line_format `{{ .message}}` | json\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.Json, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n { id: LokiOperationId.LineFormat, params: ['{{.message}}'] },\n { id: LokiOperationId.Json, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n ],\n },\n {\n name: 'Reformat log lines',\n type: LokiQueryPatternType.Log,\n // {} |= `` | logfmt | line_format `{{.message}}`\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.Logfmt, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n { id: LokiOperationId.LineFormat, params: ['{{.message}}'] },\n ],\n },\n {\n name: 'Rename lvl label to level',\n type: LokiQueryPatternType.Log,\n // {} |= `` | logfmt | label_format level=lvl\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.Logfmt, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n { id: LokiOperationId.LabelFormat, params: ['lvl', 'level'] },\n ],\n },\n {\n name: 'Query on value inside a log line',\n type: LokiQueryPatternType.Metric,\n // sum(sum_over_time({ | logfmt | __error__=`` | unwrap | __error__=`` [$__auto]))\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.Logfmt, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n { id: LokiOperationId.Unwrap, params: [''] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n { id: LokiOperationId.SumOverTime, params: ['$__auto'] },\n { id: LokiOperationId.Sum, params: [] },\n ],\n },\n {\n name: 'Total requests per label of streams',\n type: LokiQueryPatternType.Metric,\n // sum by() (count_over_time({}[$__auto)\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.CountOverTime, params: ['$__auto'] },\n { id: LokiOperationId.Sum, params: [] },\n ],\n },\n {\n name: 'Total requests per parsed label or label of streams',\n type: LokiQueryPatternType.Metric,\n // sum by() (count_over_time({}| logfmt | __error__=`` [$__auto))\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.Logfmt, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n { id: LokiOperationId.CountOverTime, params: ['$__auto'] },\n { id: LokiOperationId.Sum, params: [] },\n ],\n },\n {\n name: 'Bytes used by a log stream',\n type: LokiQueryPatternType.Metric,\n // bytes_over_time({}[$__auto])\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.BytesOverTime, params: ['$__auto'] },\n ],\n },\n {\n name: 'Count of log lines per stream',\n type: 
LokiQueryPatternType.Metric,\n // count_over_time({}[$__auto])\n operations: [\n { id: LokiOperationId.LineContains, params: [''] },\n { id: LokiOperationId.CountOverTime, params: ['$__auto'] },\n ],\n },\n {\n name: 'Top N results by label or parsed label',\n type: LokiQueryPatternType.Metric,\n // topk(10, sum by () (count_over_time({} | logfmt | __error__=`` [$__auto])))\n operations: [\n { id: LokiOperationId.Logfmt, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n { id: LokiOperationId.CountOverTime, params: ['$__auto'] },\n { id: LokiOperationId.Sum, params: [] },\n { id: LokiOperationId.TopK, params: [10] },\n ],\n },\n {\n name: 'Extracted quantile',\n type: LokiQueryPatternType.Metric,\n // quantile_over_time(0.5,{} | logfmt | unwrap latency[$__auto]) by ()\n operations: [\n { id: LokiOperationId.Logfmt, params: [] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n { id: LokiOperationId.Unwrap, params: ['latency'] },\n { id: LokiOperationId.LabelFilterNoErrors, params: [] },\n { id: LokiOperationId.QuantileOverTime, params: ['$__auto', 0.5] },\n { id: LokiOperationId.Sum, params: [] },\n ],\n },\n ];\n }\n}\n\nexport const lokiQueryModeller = new LokiQueryModeller();\n","import {\n QueryBuilderOperation,\n QueryBuilderOperationDefinition,\n QueryBuilderOperationParamDef,\n} from '@grafana/experimental';\n\nimport { defaultAddOperationHandler } from './operationUtils';\nimport { LokiOperationId, LokiVisualQueryOperationCategory } from './types';\n\nexport const binaryScalarDefs = [\n {\n id: LokiOperationId.Addition,\n name: 'Add scalar',\n sign: '+',\n },\n {\n id: LokiOperationId.Subtraction,\n name: 'Subtract scalar',\n sign: '-',\n },\n {\n id: LokiOperationId.MultiplyBy,\n name: 'Multiply by scalar',\n sign: '*',\n },\n {\n id: LokiOperationId.DivideBy,\n name: 'Divide by scalar',\n sign: '/',\n },\n {\n id: LokiOperationId.Modulo,\n name: 'Modulo by scalar',\n sign: '%',\n },\n {\n id: LokiOperationId.Exponent,\n name: 'Exponent',\n sign: '^',\n },\n {\n id: LokiOperationId.EqualTo,\n name: 'Equal to',\n sign: '==',\n comparison: true,\n },\n {\n id: LokiOperationId.NotEqualTo,\n name: 'Not equal to',\n sign: '!=',\n comparison: true,\n },\n {\n id: LokiOperationId.GreaterThan,\n name: 'Greater than',\n sign: '>',\n comparison: true,\n },\n {\n id: LokiOperationId.LessThan,\n name: 'Less than',\n sign: '<',\n comparison: true,\n },\n {\n id: LokiOperationId.GreaterOrEqual,\n name: 'Greater or equal to',\n sign: '>=',\n comparison: true,\n },\n {\n id: LokiOperationId.LessOrEqual,\n name: 'Less or equal to',\n sign: '<=',\n comparison: true,\n },\n];\n\n// Not sure about this one. 
It could also be a more generic 'Simple math operation' where user specifies\n// both the operator and the operand in a single input\nexport const binaryScalarOperations: QueryBuilderOperationDefinition[] = binaryScalarDefs.map((opDef) => {\n const params: QueryBuilderOperationParamDef[] = [{ name: 'Value', type: 'number' }];\n const defaultParams: any[] = [2];\n if (opDef.comparison) {\n params.push({\n name: 'Bool',\n type: 'boolean',\n description: 'If checked comparison will return 0 or 1 for the value rather than filtering.',\n });\n defaultParams.push(false);\n }\n\n return {\n id: opDef.id,\n name: opDef.name,\n params,\n defaultParams,\n alternativesKey: 'binary scalar operations',\n category: LokiVisualQueryOperationCategory.BinaryOps,\n renderer: getSimpleBinaryRenderer(opDef.sign),\n addOperationHandler: defaultAddOperationHandler,\n };\n});\n\nfunction getSimpleBinaryRenderer(operator: string) {\n return function binaryRenderer(\n model: QueryBuilderOperation,\n def: QueryBuilderOperationDefinition,\n innerExpr: string\n ) {\n let param = model.params[0];\n let bool = '';\n if (model.params.length === 2) {\n bool = model.params[1] ? ' bool' : '';\n }\n\n return `${innerExpr} ${operator}${bool} ${param}`;\n };\n}\n","import React, { useState } from 'react';\n\nimport { SelectableValue, getDefaultTimeRange, toOption } from '@grafana/data';\nimport { QueryBuilderOperationParamEditorProps, VisualQueryModeller } from '@grafana/experimental';\nimport { config } from '@grafana/runtime';\nimport { Select } from '@grafana/ui';\n\nimport { placeHolderScopedVars } from '../../components/monaco-query-field/monaco-completion-provider/validation';\nimport { LokiDatasource } from '../../datasource';\nimport { getLogQueryFromMetricsQuery, isQueryWithError } from '../../queryUtils';\nimport { extractUnwrapLabelKeysFromDataFrame } from '../../responseUtils';\nimport { getOperationParamId } from '../operationUtils';\nimport { LokiVisualQuery } from '../types';\n\nexport function UnwrapParamEditor({\n onChange,\n index,\n operationId,\n value,\n query,\n datasource,\n timeRange,\n queryModeller,\n}: QueryBuilderOperationParamEditorProps) {\n const [state, setState] = useState<{\n options?: Array>;\n isLoading?: boolean;\n }>({});\n\n return (\n {\n // This check is always true, we do it to make typescript happy\n if (datasource instanceof LokiDatasource && config.featureToggles.lokiQueryHints) {\n setState({ isLoading: true });\n const options = await loadUnwrapOptions(query, datasource, queryModeller, timeRange);\n setState({ options, isLoading: undefined });\n }\n }}\n isLoading={state.isLoading}\n allowCustomValue\n noOptionsMessage=\"No labels found\"\n loadingMessage=\"Loading labels\"\n options={state.options}\n value={value ? 
toOption(value.toString()) : null}\n onChange={(value) => {\n if (value.value) {\n onChange(index, value.value);\n }\n }}\n />\n );\n}\n\nasync function loadUnwrapOptions(\n query: LokiVisualQuery,\n datasource: LokiDatasource,\n queryModeller: VisualQueryModeller,\n timeRange = getDefaultTimeRange()\n): Promise>> {\n const queryExpr = queryModeller.renderQuery(query);\n const logExpr = getLogQueryFromMetricsQuery(queryExpr);\n if (isQueryWithError(datasource.interpolateString(logExpr, placeHolderScopedVars))) {\n return [];\n }\n\n const samples = await datasource.getDataSamples({ expr: logExpr, refId: 'unwrap_samples' }, timeRange);\n const unwrapLabels = extractUnwrapLabelKeysFromDataFrame(samples[0]);\n\n const labelOptions = unwrapLabels.map((label) => ({\n label,\n value: label,\n }));\n\n return labelOptions;\n}\n","import { QueryBuilderOperationDefinition, QueryBuilderOperationParamValue } from '@grafana/experimental';\n\nimport { binaryScalarOperations } from './binaryScalarOperations';\nimport { UnwrapParamEditor } from './components/UnwrapParamEditor';\nimport {\n addLokiOperation,\n addNestedQueryHandler,\n createAggregationOperation,\n createAggregationOperationWithParam,\n createRangeOperation,\n createRangeOperationWithGrouping,\n getLineFilterRenderer,\n labelFilterRenderer,\n pipelineRenderer,\n} from './operationUtils';\nimport { LokiOperationId, LokiOperationOrder, lokiOperators, LokiVisualQueryOperationCategory } from './types';\n\nfunction getOperationDefinitions(): QueryBuilderOperationDefinition[] {\n const aggregations = [\n LokiOperationId.Sum,\n LokiOperationId.Min,\n LokiOperationId.Max,\n LokiOperationId.Avg,\n LokiOperationId.Stddev,\n LokiOperationId.Stdvar,\n LokiOperationId.Count,\n ].flatMap((opId) =>\n createAggregationOperation(opId, {\n addOperationHandler: addLokiOperation,\n orderRank: LokiOperationOrder.Last,\n })\n );\n\n const aggregationsWithParam = [LokiOperationId.TopK, LokiOperationId.BottomK].flatMap((opId) => {\n return createAggregationOperationWithParam(\n opId,\n {\n params: [{ name: 'K-value', type: 'number' }],\n defaultParams: [5],\n },\n {\n addOperationHandler: addLokiOperation,\n orderRank: LokiOperationOrder.Last,\n }\n );\n });\n\n const rangeOperations = [\n createRangeOperation(LokiOperationId.Rate),\n createRangeOperation(LokiOperationId.RateCounter),\n createRangeOperation(LokiOperationId.CountOverTime),\n createRangeOperation(LokiOperationId.SumOverTime),\n createRangeOperation(LokiOperationId.BytesRate),\n createRangeOperation(LokiOperationId.BytesOverTime),\n createRangeOperation(LokiOperationId.AbsentOverTime),\n ];\n\n const rangeOperationsWithGrouping = [\n ...createRangeOperationWithGrouping(LokiOperationId.AvgOverTime),\n ...createRangeOperationWithGrouping(LokiOperationId.MaxOverTime),\n ...createRangeOperationWithGrouping(LokiOperationId.MinOverTime),\n ...createRangeOperationWithGrouping(LokiOperationId.FirstOverTime),\n ...createRangeOperationWithGrouping(LokiOperationId.LastOverTime),\n ...createRangeOperationWithGrouping(LokiOperationId.StdvarOverTime),\n ...createRangeOperationWithGrouping(LokiOperationId.StddevOverTime),\n ...createRangeOperationWithGrouping(LokiOperationId.QuantileOverTime),\n ];\n\n const list: QueryBuilderOperationDefinition[] = [\n ...aggregations,\n ...aggregationsWithParam,\n ...rangeOperations,\n ...rangeOperationsWithGrouping,\n {\n id: LokiOperationId.Json,\n name: 'Json',\n params: [\n {\n name: 'Expression',\n type: 'string',\n restParam: true,\n optional: true,\n minWidth: 18,\n 
placeholder: 'server=\"servers[0]\"',\n description:\n 'Using expressions with your json parser will extract only the specified json fields to labels. You can specify one or more expressions in this way. All expressions must be quoted.',\n },\n ],\n defaultParams: [],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.Parsers,\n renderer: pipelineRenderer,\n addOperationHandler: addLokiOperation,\n explainHandler: () =>\n `This will extract keys and values from a [json](https://grafana.com/docs/loki/latest/logql/log_queries/#json) formatted log line as labels. The extracted labels can be used in label filter expressions and used as values for a range aggregation via the unwrap operation.`,\n },\n {\n id: LokiOperationId.Logfmt,\n name: 'Logfmt',\n params: [\n {\n name: 'Strict',\n type: 'boolean',\n optional: true,\n description:\n 'With strict parsing enabled, the logfmt parser immediately stops scanning the log line and returns early with an error when it encounters any poorly formatted key/value pair.',\n },\n {\n name: 'Keep empty',\n type: 'boolean',\n optional: true,\n description:\n 'The logfmt parser retains standalone keys (keys without a value) as labels with its value set to empty string. ',\n },\n {\n name: 'Expression',\n type: 'string',\n optional: true,\n restParam: true,\n minWidth: 18,\n placeholder: 'field_name',\n description:\n 'Using expressions with your logfmt parser will extract and rename (if provided) only the specified fields to labels. You can specify one or more expressions in this way.',\n },\n ],\n defaultParams: [false, false],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.Parsers,\n renderer: pipelineRenderer,\n addOperationHandler: addLokiOperation,\n explainHandler: () =>\n `This will extract all keys and values from a [logfmt](https://grafana.com/docs/loki/latest/logql/log_queries/#logfmt) formatted log line as labels. The extracted labels can be used in label filter expressions and used as values for a range aggregation via the unwrap operation.`,\n },\n {\n id: LokiOperationId.Regexp,\n name: 'Regexp',\n params: [\n {\n name: 'String',\n type: 'string',\n hideName: true,\n placeholder: '',\n description: 'The regexp expression that matches the structure of a log line.',\n minWidth: 20,\n },\n ],\n defaultParams: [''],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.Parsers,\n renderer: (model, def, innerExpr) => `${innerExpr} | regexp \\`${model.params[0]}\\``,\n addOperationHandler: addLokiOperation,\n explainHandler: () =>\n `The [regexp parser](https://grafana.com/docs/loki/latest/logql/log_queries/#regular-expression) takes a single parameter | regexp \"\" which is the regular expression using the Golang RE2 syntax. The regular expression must contain a least one named sub-match (e.g (?Pre)), each sub-match will extract a different label. The expression matches the structure of a log line. 
The extracted labels can be used in label filter expressions and used as values for a range aggregation via the unwrap operation.`,\n },\n {\n id: LokiOperationId.Pattern,\n name: 'Pattern',\n params: [\n {\n name: 'String',\n type: 'string',\n hideName: true,\n placeholder: '',\n description: 'The expression that matches the structure of a log line.',\n minWidth: 20,\n },\n ],\n defaultParams: [''],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.Parsers,\n renderer: (model, def, innerExpr) => `${innerExpr} | pattern \\`${model.params[0]}\\``,\n addOperationHandler: addLokiOperation,\n explainHandler: () =>\n `The [pattern parser](https://grafana.com/docs/loki/latest/logql/log_queries/#pattern) allows the explicit extraction of fields from log lines by defining a pattern expression (| pattern \\`\\`). The expression matches the structure of a log line. The extracted labels can be used in label filter expressions and used as values for a range aggregation via the unwrap operation.`,\n },\n {\n id: LokiOperationId.Unpack,\n name: 'Unpack',\n params: [],\n defaultParams: [],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.Parsers,\n renderer: pipelineRenderer,\n addOperationHandler: addLokiOperation,\n explainHandler: () =>\n `This will extract all keys and values from a JSON log line, [unpacking](https://grafana.com/docs/loki/latest/logql/log_queries/#unpack) all embedded labels in the pack stage. The extracted labels can be used in label filter expressions and used as values for a range aggregation via the unwrap operation.`,\n },\n {\n id: LokiOperationId.LineFormat,\n name: 'Line format',\n params: [\n {\n name: 'String',\n type: 'string',\n hideName: true,\n placeholder: '{{.status_code}}',\n description: 'A line template that can refer to stream labels and extracted labels.',\n minWidth: 20,\n },\n ],\n defaultParams: [''],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.PipeOperations,\n renderer: (model, def, innerExpr) => `${innerExpr} | line_format \\`${model.params[0]}\\``,\n addOperationHandler: addLokiOperation,\n explainHandler: () =>\n `This will replace the log line using a specified template. The template can refer to stream labels and extracted labels.\n\nExample: \\`{{.status_code}} - {{.message}}\\`\n\n[Read the docs](https://grafana.com/docs/loki/latest/logql/log_queries/#line-format-expression) for more.\n `,\n },\n {\n id: LokiOperationId.LabelFormat,\n name: 'Label format',\n params: [\n { name: 'Label', type: 'string' },\n { name: 'Rename to', type: 'string' },\n ],\n defaultParams: ['', ''],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.PipeOperations,\n renderer: (model, def, innerExpr) => `${innerExpr} | label_format ${model.params[1]}=${model.params[0]}`,\n addOperationHandler: addLokiOperation,\n explainHandler: () =>\n `This will change the name of a label to the desired new label. 
In the example below, label \"error_level\" will be renamed to \"level\".\n\nExample: \\`\\`error_level=\\`level\\` \\`\\`\n\n[Read the docs](https://grafana.com/docs/loki/latest/logql/log_queries/#labels-format-expression) for more.\n `,\n },\n\n {\n id: LokiOperationId.LineContains,\n name: 'Line contains',\n params: [\n {\n name: '',\n type: 'string',\n hideName: true,\n restParam: true,\n placeholder: 'Text to find',\n description: 'Find log lines that contain this text',\n minWidth: 20,\n runQueryOnEnter: true,\n },\n ],\n defaultParams: [''],\n alternativesKey: 'line filter',\n category: LokiVisualQueryOperationCategory.LineFilters,\n orderRank: LokiOperationOrder.LineFilters,\n renderer: getLineFilterRenderer('|='),\n addOperationHandler: addLokiOperation,\n explainHandler: (op) => `Return log lines that contain string \\`${op.params?.join('`, or `')}\\`.`,\n },\n {\n id: LokiOperationId.LineContainsNot,\n name: 'Line does not contain',\n params: [\n {\n name: '',\n type: 'string',\n hideName: true,\n restParam: true,\n placeholder: 'Text to exclude',\n description: 'Find log lines that do not contain this text',\n minWidth: 26,\n runQueryOnEnter: true,\n },\n ],\n defaultParams: [''],\n alternativesKey: 'line filter',\n category: LokiVisualQueryOperationCategory.LineFilters,\n orderRank: LokiOperationOrder.LineFilters,\n renderer: getLineFilterRenderer('!='),\n addOperationHandler: addLokiOperation,\n explainHandler: (op) => `Return log lines that do not contain string \\`${op.params?.join('`, or `')}\\`.`,\n },\n {\n id: LokiOperationId.LineContainsCaseInsensitive,\n name: 'Line contains case insensitive',\n params: [\n {\n name: '',\n type: 'string',\n hideName: true,\n restParam: true,\n placeholder: 'Text to find',\n description: 'Find log lines that contain this text',\n minWidth: 33,\n runQueryOnEnter: true,\n },\n ],\n defaultParams: [''],\n alternativesKey: 'line filter',\n category: LokiVisualQueryOperationCategory.LineFilters,\n orderRank: LokiOperationOrder.LineFilters,\n renderer: getLineFilterRenderer('|~', true),\n addOperationHandler: addLokiOperation,\n explainHandler: (op) => `Return log lines that match regex \\`(?i)${op.params?.join('`, or `(?i)')}\\`.`,\n },\n {\n id: LokiOperationId.LineContainsNotCaseInsensitive,\n name: 'Line does not contain case insensitive',\n params: [\n {\n name: '',\n type: 'string',\n hideName: true,\n restParam: true,\n placeholder: 'Text to exclude',\n description: 'Find log lines that do not contain this text',\n minWidth: 40,\n runQueryOnEnter: true,\n },\n ],\n defaultParams: [''],\n alternativesKey: 'line filter',\n category: LokiVisualQueryOperationCategory.LineFilters,\n orderRank: LokiOperationOrder.LineFilters,\n renderer: getLineFilterRenderer('!~', true),\n addOperationHandler: addLokiOperation,\n explainHandler: (op) => `Return log lines that do not match regex \\`(?i)${op.params?.join('`, or `(?i)')}\\`.`,\n },\n {\n id: LokiOperationId.LineMatchesRegex,\n name: 'Line contains regex match',\n params: [\n {\n name: '',\n type: 'string',\n hideName: true,\n restParam: true,\n placeholder: 'Pattern to match',\n description: 'Find log lines that match this regex pattern',\n minWidth: 30,\n runQueryOnEnter: true,\n },\n ],\n defaultParams: [''],\n alternativesKey: 'line filter',\n category: LokiVisualQueryOperationCategory.LineFilters,\n orderRank: LokiOperationOrder.LineFilters,\n renderer: getLineFilterRenderer('|~'),\n addOperationHandler: addLokiOperation,\n explainHandler: (op) => `Return log lines that match a 
\\`RE2\\` regex pattern. \\`${op.params?.join('`, or `')}\\`.`,\n },\n {\n id: LokiOperationId.LineMatchesRegexNot,\n name: 'Line does not match regex',\n params: [\n {\n name: '',\n type: 'string',\n hideName: true,\n restParam: true,\n placeholder: 'Pattern to exclude',\n description: 'Find log lines that do not match this regex pattern',\n minWidth: 30,\n runQueryOnEnter: true,\n },\n ],\n defaultParams: [''],\n alternativesKey: 'line filter',\n category: LokiVisualQueryOperationCategory.LineFilters,\n orderRank: LokiOperationOrder.LineFilters,\n renderer: getLineFilterRenderer('!~'),\n addOperationHandler: addLokiOperation,\n explainHandler: (op) =>\n `Return log lines that do not match a \\`RE2\\` regex pattern. \\`${op.params?.join('`, or `')}\\`.`,\n },\n {\n id: LokiOperationId.LineFilterIpMatches,\n name: 'IP line filter expression',\n params: [\n {\n name: 'Operator',\n type: 'string',\n minWidth: 16,\n options: [lokiOperators.contains, lokiOperators.doesNotContain],\n },\n {\n name: 'Pattern',\n type: 'string',\n placeholder: '',\n minWidth: 16,\n runQueryOnEnter: true,\n },\n ],\n defaultParams: ['|=', ''],\n alternativesKey: 'line filter',\n category: LokiVisualQueryOperationCategory.LineFilters,\n orderRank: LokiOperationOrder.LineFilters,\n renderer: (op, def, innerExpr) => `${innerExpr} ${op.params[0]} ip(\\`${op.params[1]}\\`)`,\n addOperationHandler: addLokiOperation,\n explainHandler: (op) => `Return log lines using IP matching of \\`${op.params[1]}\\``,\n },\n {\n id: LokiOperationId.LabelFilter,\n name: 'Label filter expression',\n params: [\n { name: 'Label', type: 'string', minWidth: 14 },\n {\n name: 'Operator',\n type: 'string',\n minWidth: 14,\n options: [\n lokiOperators.equals,\n lokiOperators.doesNotEqual,\n lokiOperators.matchesRegex,\n lokiOperators.doesNotMatchRegex,\n lokiOperators.greaterThan,\n lokiOperators.lessThan,\n lokiOperators.greaterThanOrEqual,\n lokiOperators.lessThanOrEqual,\n ],\n },\n { name: 'Value', type: 'string', minWidth: 14 },\n ],\n defaultParams: ['', '=', ''],\n alternativesKey: 'label filter',\n category: LokiVisualQueryOperationCategory.LabelFilters,\n orderRank: LokiOperationOrder.PipeOperations,\n renderer: labelFilterRenderer,\n addOperationHandler: addLokiOperation,\n explainHandler: () => `Label expression filter allows filtering using original and extracted labels.`,\n },\n {\n id: LokiOperationId.LabelFilterIpMatches,\n name: 'IP label filter expression',\n params: [\n { name: 'Label', type: 'string', minWidth: 14 },\n {\n name: 'Operator',\n type: 'string',\n minWidth: 14,\n options: [lokiOperators.equals, lokiOperators.doesNotEqual],\n },\n { name: 'Value', type: 'string', minWidth: 14 },\n ],\n defaultParams: ['', '=', ''],\n alternativesKey: 'label filter',\n category: LokiVisualQueryOperationCategory.LabelFilters,\n orderRank: LokiOperationOrder.PipeOperations,\n renderer: (model, def, innerExpr) =>\n `${innerExpr} | ${model.params[0]} ${model.params[1]} ip(\\`${model.params[2]}\\`)`,\n addOperationHandler: addLokiOperation,\n explainHandler: (op) => `Return log lines using IP matching of \\`${op.params[2]}\\` for \\`${op.params[0]}\\` label`,\n },\n {\n id: LokiOperationId.LabelFilterNoErrors,\n name: 'No pipeline errors',\n params: [],\n defaultParams: [],\n alternativesKey: 'label filter',\n category: LokiVisualQueryOperationCategory.LabelFilters,\n orderRank: LokiOperationOrder.NoErrors,\n renderer: (model, def, innerExpr) => `${innerExpr} | __error__=\\`\\``,\n addOperationHandler: addLokiOperation,\n 
explainHandler: () => `Filter out all formatting and parsing errors.`,\n },\n {\n id: LokiOperationId.Unwrap,\n name: 'Unwrap',\n params: [\n {\n name: 'Identifier',\n type: 'string',\n hideName: true,\n minWidth: 16,\n placeholder: 'Label key',\n editor: UnwrapParamEditor,\n },\n {\n name: 'Conversion function',\n hideName: true,\n type: 'string',\n options: ['duration', 'duration_seconds', 'bytes'],\n optional: true,\n },\n ],\n defaultParams: ['', ''],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.Unwrap,\n renderer: (op, def, innerExpr) =>\n `${innerExpr} | unwrap ${op.params[1] ? `${op.params[1]}(${op.params[0]})` : op.params[0]}`,\n addOperationHandler: addLokiOperation,\n explainHandler: (op) => {\n let label = String(op.params[0]).length > 0 ? op.params[0] : '';\n return `Use the extracted label \\`${label}\\` as sample values instead of log lines for the subsequent range aggregation.${\n op.params[1]\n ? ` Conversion function \\`${op.params[1]}\\` wrapping \\`${label}\\` will attempt to convert this label from a specific format (e.g. 3k, 500ms).`\n : ''\n }`;\n },\n },\n {\n id: LokiOperationId.Decolorize,\n name: 'Decolorize',\n params: [],\n defaultParams: [],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.PipeOperations,\n renderer: (op, def, innerExpr) => `${innerExpr} | decolorize`,\n addOperationHandler: addLokiOperation,\n explainHandler: () => `This will remove ANSI color codes from log lines.`,\n },\n {\n id: LokiOperationId.Drop,\n name: 'Drop',\n params: [\n // As drop can support both labels (e.g. job) and expressions (e.g. job=\"grafana\"), we\n // use input and not LabelParamEditor.\n {\n name: 'Label',\n type: 'string',\n restParam: true,\n optional: true,\n minWidth: 18,\n placeholder: 'job=\"grafana\"',\n description: 'Specify labels or expressions to drop.',\n },\n ],\n defaultParams: [''],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.PipeOperations,\n renderer: pipelineRenderer,\n addOperationHandler: addLokiOperation,\n explainHandler: () => 'The drop expression will drop the given labels in the pipeline.',\n },\n {\n id: LokiOperationId.Keep,\n name: 'Keep',\n params: [\n // As keep can support both labels (e.g. job) and expressions (e.g. 
job=\"grafana\"), we\n // use input and not LabelParamEditor.\n {\n name: 'Label',\n type: 'string',\n restParam: true,\n optional: true,\n minWidth: 18,\n placeholder: 'job=\"grafana\"',\n description: 'Specify labels or expressions to keep.',\n },\n ],\n defaultParams: [''],\n alternativesKey: 'format',\n category: LokiVisualQueryOperationCategory.Formats,\n orderRank: LokiOperationOrder.PipeOperations,\n renderer: pipelineRenderer,\n addOperationHandler: addLokiOperation,\n explainHandler: () =>\n 'The keep expression will keep only the specified labels in the pipeline and drop all the other labels.',\n },\n ...binaryScalarOperations,\n {\n id: LokiOperationId.NestedQuery,\n name: 'Binary operation with query',\n params: [],\n defaultParams: [],\n category: LokiVisualQueryOperationCategory.BinaryOps,\n renderer: (model, def, innerExpr) => innerExpr,\n addOperationHandler: addNestedQueryHandler,\n },\n ];\n\n return list;\n}\n\n// Keeping a local copy as an optimization measure.\nexport const operationDefinitions = getOperationDefinitions();\n\n/**\n * Given an operator, return the corresponding explain.\n * For usage within the Query Editor.\n */\nexport function explainOperator(id: LokiOperationId | string): string {\n const definition = operationDefinitions.find((operation) => operation.id === id);\n\n const explain = definition?.explainHandler?.({ id: '', params: [''] }) || '';\n\n // Strip markdown links\n return explain.replace(/\\[(.*)\\]\\(.*\\)/g, '$1');\n}\n\nexport function getDefinitionById(id: string): QueryBuilderOperationDefinition | undefined {\n return operationDefinitions.find((x) => x.id === id);\n}\n\nexport function checkParamsAreValid(\n def: QueryBuilderOperationDefinition,\n params: QueryBuilderOperationParamValue[]\n): boolean {\n // For now we only check if the operation has all the required params.\n if (params.length < def.params.filter((param) => !param.optional).length) {\n return false;\n }\n\n return true;\n}\n","import { SyntaxNode } from '@lezer/common';\n\nimport { QueryBuilderLabelFilter, QueryBuilderOperation, QueryBuilderOperationParamValue } from '@grafana/experimental';\nimport {\n And,\n BinOpExpr,\n Bool,\n By,\n ConvOp,\n Decolorize,\n DropLabel,\n DropLabels,\n DropLabelsExpr,\n Filter,\n FilterOp,\n Grouping,\n GroupingLabelList,\n GroupingLabels,\n Identifier,\n Ip,\n IpLabelFilter,\n Json,\n JsonExpressionParser,\n KeepLabel,\n KeepLabels,\n KeepLabelsExpr,\n LabelExtractionExpression,\n LabelFilter,\n LabelFormatMatcher,\n LabelParser,\n LineFilter,\n LineFormatExpr,\n LogfmtExpressionParser,\n LogfmtParser,\n LogRangeExpr,\n Matcher,\n MetricExpr,\n Number as NumberLezer,\n On,\n Or,\n parser,\n ParserFlag,\n Range,\n RangeAggregationExpr,\n RangeOp,\n String,\n UnitFilter,\n Unwrap,\n UnwrapExpr,\n VectorAggregationExpr,\n VectorOp,\n Without,\n BinOpModifier,\n OnOrIgnoringModifier,\n OrFilter,\n} from '@grafana/lezer-logql';\n\nimport { binaryScalarDefs } from './binaryScalarOperations';\nimport { checkParamsAreValid, getDefinitionById } from './operations';\nimport {\n ErrorId,\n getAllByType,\n getLeftMostChild,\n getString,\n makeBinOp,\n makeError,\n replaceVariables,\n} from './parsingUtils';\nimport { LokiOperationId, LokiVisualQuery, LokiVisualQueryBinary } from './types';\n\ninterface Context {\n query: LokiVisualQuery;\n errors: ParsingError[];\n}\n\ninterface ParsingError {\n text: string;\n from?: number;\n to?: number;\n parentType?: string;\n}\n\ninterface GetOperationResult {\n operation?: QueryBuilderOperation;\n error?: 
string;\n}\n\nexport function buildVisualQueryFromString(expr: string): Context {\n const replacedExpr = replaceVariables(expr);\n const tree = parser.parse(replacedExpr);\n const node = tree.topNode;\n\n // This will be modified in the handleExpression\n const visQuery: LokiVisualQuery = {\n labels: [],\n operations: [],\n };\n\n const context: Context = {\n query: visQuery,\n errors: [],\n };\n\n try {\n handleExpression(replacedExpr, node, context);\n } catch (err) {\n // Not ideal to log it here, but otherwise we would lose the stack trace.\n console.error(err);\n if (err instanceof Error) {\n context.errors.push({\n text: err.message,\n });\n }\n }\n\n // If we have empty query, we want to reset errors\n if (isEmptyQuery(context.query)) {\n context.errors = [];\n }\n return context;\n}\n\nexport function handleExpression(expr: string, node: SyntaxNode, context: Context) {\n const visQuery = context.query;\n switch (node.type.id) {\n case Matcher: {\n visQuery.labels.push(getLabel(expr, node));\n const err = node.getChild(ErrorId);\n if (err) {\n context.errors.push(makeError(expr, err));\n }\n break;\n }\n\n case LineFilter: {\n const { operation, error } = getLineFilter(expr, node);\n if (operation) {\n visQuery.operations.push(operation);\n }\n // Show error for query patterns not supported in visual query builder\n if (error) {\n context.errors.push(createNotSupportedError(expr, node, error));\n }\n break;\n }\n\n case LabelParser: {\n visQuery.operations.push(getLabelParser(expr, node));\n break;\n }\n\n case LabelFilter: {\n const { operation, error } = getLabelFilter(expr, node);\n if (operation) {\n visQuery.operations.push(operation);\n }\n // Show error for query patterns not supported in visual query builder\n if (error) {\n context.errors.push(createNotSupportedError(expr, node, error));\n }\n break;\n }\n case JsonExpressionParser: {\n visQuery.operations.push(getJsonExpressionParser(expr, node));\n break;\n }\n\n case LogfmtParser:\n case LogfmtExpressionParser: {\n const { operation, error } = getLogfmtParser(expr, node);\n if (operation) {\n visQuery.operations.push(operation);\n }\n if (error) {\n context.errors.push(createNotSupportedError(expr, node, error));\n }\n break;\n }\n\n case LineFormatExpr: {\n visQuery.operations.push(getLineFormat(expr, node));\n break;\n }\n\n case LabelFormatMatcher: {\n visQuery.operations.push(getLabelFormat(expr, node));\n break;\n }\n\n case UnwrapExpr: {\n const { operation, error } = handleUnwrapExpr(expr, node, context);\n if (operation) {\n visQuery.operations.push(operation);\n }\n // Show error for query patterns not supported in visual query builder\n if (error) {\n context.errors.push(createNotSupportedError(expr, node, error));\n }\n\n break;\n }\n\n case Decolorize: {\n visQuery.operations.push(getDecolorize());\n break;\n }\n\n case RangeAggregationExpr: {\n visQuery.operations.push(handleRangeAggregation(expr, node, context));\n break;\n }\n\n case VectorAggregationExpr: {\n visQuery.operations.push(handleVectorAggregation(expr, node, context));\n break;\n }\n\n case BinOpExpr: {\n handleBinary(expr, node, context);\n break;\n }\n\n case ErrorId: {\n if (isIntervalVariableError(node)) {\n break;\n }\n context.errors.push(makeError(expr, node));\n break;\n }\n\n case DropLabelsExpr: {\n visQuery.operations.push(handleDropFilter(expr, node, context));\n break;\n }\n\n case KeepLabelsExpr: {\n visQuery.operations.push(handleKeepFilter(expr, node, context));\n break;\n }\n\n default: {\n // Any other nodes we just ignore and 
go to its children. This should be fine as there are lots of wrapper\n // nodes that can be skipped.\n // TODO: there are probably cases where we will just skip nodes we don't support and we should be able to\n // detect those and report back.\n let child = node.firstChild;\n while (child) {\n handleExpression(expr, child, context);\n child = child.nextSibling;\n }\n }\n }\n}\n\nfunction getLabel(expr: string, node: SyntaxNode): QueryBuilderLabelFilter {\n const labelNode = node.getChild(Identifier);\n const label = getString(expr, labelNode);\n const op = getString(expr, labelNode?.nextSibling);\n let value = getString(expr, node.getChild(String));\n // `value` is wrapped in double quotes, so we need to remove them. As a value can contain double quotes, we can't use RegEx here.\n value = value.substring(1, value.length - 1);\n\n return {\n label,\n op,\n value,\n };\n}\n\nfunction getLineFilter(expr: string, node: SyntaxNode): GetOperationResult {\n const filter = getString(expr, node.getChild(Filter));\n const filterExpr = handleQuotes(getString(expr, node.getChild(String)));\n const ipLineFilter = node.getChild(FilterOp)?.getChild(Ip);\n if (ipLineFilter) {\n return {\n operation: {\n id: LokiOperationId.LineFilterIpMatches,\n params: [filter, filterExpr],\n },\n };\n }\n\n const params = [filterExpr];\n let orFilter = node.getChild(OrFilter);\n while (orFilter) {\n params.push(handleQuotes(getString(expr, orFilter.getChild(String))));\n orFilter = orFilter.getChild(OrFilter);\n }\n\n const mapFilter: Record = {\n '|=': LokiOperationId.LineContains,\n '!=': LokiOperationId.LineContainsNot,\n '|~': LokiOperationId.LineMatchesRegex,\n '!~': LokiOperationId.LineMatchesRegexNot,\n };\n\n return {\n operation: {\n id: mapFilter[filter],\n params,\n },\n };\n}\n\nfunction getLabelParser(expr: string, node: SyntaxNode): QueryBuilderOperation {\n const parserNode = node.firstChild;\n const parser = getString(expr, parserNode);\n\n const string = handleQuotes(getString(expr, node.getChild(String)));\n let params: QueryBuilderOperationParamValue[] = !!string ? 
[string] : [];\n const opDef = getDefinitionById(parser);\n if (opDef && !checkParamsAreValid(opDef, params)) {\n params = opDef?.defaultParams || [];\n }\n\n return {\n id: parser,\n params,\n };\n}\n\nfunction getJsonExpressionParser(expr: string, node: SyntaxNode): QueryBuilderOperation {\n const parserNode = node.getChild(Json);\n const parser = getString(expr, parserNode);\n\n const params = [...getAllByType(expr, node, LabelExtractionExpression)];\n return {\n id: parser,\n params,\n };\n}\n\nfunction getLogfmtParser(expr: string, node: SyntaxNode): GetOperationResult {\n const flags: string[] = [];\n const labels: string[] = [];\n let error: string | undefined = undefined;\n\n const offset = node.from;\n node.toTree().iterate({\n enter: (subNode) => {\n if (subNode.type.id === ParserFlag) {\n flags.push(expr.substring(subNode.from + offset, subNode.to + offset));\n } else if (subNode.type.id === LabelExtractionExpression) {\n labels.push(expr.substring(subNode.from + offset, subNode.to + offset));\n } else if (subNode.type.id === ErrorId) {\n error = `Unexpected string \"${expr.substring(subNode.from + offset, subNode.to + offset)}\"`;\n }\n },\n });\n\n const operation = {\n id: LokiOperationId.Logfmt,\n params: [flags.includes('--strict'), flags.includes('--keep-empty'), ...labels],\n };\n\n return {\n operation,\n error,\n };\n}\n\nfunction getLabelFilter(expr: string, node: SyntaxNode): GetOperationResult {\n // Check for nodes not supported in visual builder and return error\n if (node.getChild(Or) || node.getChild(And) || node.getChild('Comma')) {\n return {\n error: 'Label filter with comma, \"and\", \"or\" not supported in query builder',\n };\n }\n if (node.firstChild!.type.id === IpLabelFilter) {\n const ipLabelFilter = node.firstChild;\n const label = ipLabelFilter?.getChild(Identifier);\n const op = label?.nextSibling;\n const value = ipLabelFilter?.getChild(String);\n const valueString = handleQuotes(getString(expr, value));\n\n return {\n operation: {\n id: LokiOperationId.LabelFilterIpMatches,\n params: [getString(expr, label), getString(expr, op), valueString],\n },\n };\n }\n\n const id = LokiOperationId.LabelFilter;\n if (node.firstChild!.type.id === UnitFilter) {\n const filter = node.firstChild!.firstChild;\n const label = filter!.firstChild;\n const op = label!.nextSibling;\n const value = op!.nextSibling;\n const valueString = handleQuotes(getString(expr, value));\n\n return {\n operation: {\n id,\n params: [getString(expr, label), getString(expr, op), valueString],\n },\n };\n }\n // In this case it is Matcher or NumberFilter\n const filter = node.firstChild;\n const label = filter!.firstChild;\n const op = label!.nextSibling;\n const value = op!.nextSibling;\n const params = [getString(expr, label), getString(expr, op), handleQuotes(getString(expr, value))];\n\n // Special case of pipe filtering - no errors\n if (params.join('') === `__error__=`) {\n return {\n operation: {\n id: LokiOperationId.LabelFilterNoErrors,\n params: [],\n },\n };\n }\n\n return {\n operation: {\n id,\n params,\n },\n };\n}\n\nfunction getLineFormat(expr: string, node: SyntaxNode): QueryBuilderOperation {\n const id = LokiOperationId.LineFormat;\n const string = handleQuotes(getString(expr, node.getChild(String)));\n\n return {\n id,\n params: [string],\n };\n}\n\nfunction getLabelFormat(expr: string, node: SyntaxNode): QueryBuilderOperation {\n const id = LokiOperationId.LabelFormat;\n const renameTo = node.getChild(Identifier);\n const op = renameTo!.nextSibling;\n const 
originalLabel = op!.nextSibling;\n\n return {\n id,\n params: [getString(expr, originalLabel), handleQuotes(getString(expr, renameTo))],\n };\n}\n\nfunction getDecolorize(): QueryBuilderOperation {\n const id = LokiOperationId.Decolorize;\n\n return {\n id,\n params: [],\n };\n}\n\nfunction handleUnwrapExpr(expr: string, node: SyntaxNode, context: Context): GetOperationResult {\n const unwrapExprChild = node.getChild(UnwrapExpr);\n const labelFilterChild = node.getChild(LabelFilter);\n const unwrapChild = node.getChild(Unwrap);\n\n if (unwrapExprChild) {\n handleExpression(expr, unwrapExprChild, context);\n }\n\n if (labelFilterChild) {\n handleExpression(expr, labelFilterChild, context);\n }\n\n if (unwrapChild) {\n if (unwrapChild.nextSibling?.type.id === ConvOp) {\n const convOp = unwrapChild.nextSibling;\n const identifier = convOp.nextSibling;\n return {\n operation: {\n id: LokiOperationId.Unwrap,\n params: [getString(expr, identifier), getString(expr, convOp)],\n },\n };\n }\n\n return {\n operation: {\n id: LokiOperationId.Unwrap,\n params: [getString(expr, unwrapChild?.nextSibling), ''],\n },\n };\n }\n\n return {};\n}\n\nfunction handleRangeAggregation(expr: string, node: SyntaxNode, context: Context) {\n const nameNode = node.getChild(RangeOp);\n const funcName = getString(expr, nameNode);\n const number = node.getChild(NumberLezer);\n const logExpr = node.getChild(LogRangeExpr);\n const params = number !== null && number !== undefined ? [getString(expr, number)] : [];\n const range = logExpr?.getChild(Range);\n const rangeValue = range ? getString(expr, range) : null;\n const grouping = node.getChild(Grouping);\n\n if (rangeValue) {\n params.unshift(rangeValue.substring(1, rangeValue.length - 1));\n }\n\n if (grouping) {\n params.push(...getAllByType(expr, grouping, Identifier));\n }\n\n const op = {\n id: funcName,\n params,\n };\n\n if (logExpr) {\n handleExpression(expr, logExpr, context);\n }\n\n return op;\n}\n\nfunction handleVectorAggregation(expr: string, node: SyntaxNode, context: Context) {\n const nameNode = node.getChild(VectorOp);\n let funcName = getString(expr, nameNode);\n\n const grouping = node.getChild(Grouping);\n const params = [];\n\n const numberNode = node.getChild(NumberLezer);\n\n if (numberNode) {\n params.push(Number(getString(expr, numberNode)));\n }\n\n if (grouping) {\n const byModifier = grouping.getChild(By);\n if (byModifier && funcName) {\n funcName = `__${funcName}_by`;\n }\n\n const withoutModifier = grouping.getChild(Without);\n if (withoutModifier) {\n funcName = `__${funcName}_without`;\n }\n\n params.push(...getAllByType(expr, grouping, Identifier));\n }\n\n const metricExpr = node.getChild(MetricExpr);\n const op: QueryBuilderOperation = { id: funcName, params };\n\n if (metricExpr) {\n // A vector aggregation expression with a child of metric expression with a child of binary expression is ambiguous after being parsed into a visual query\n if (metricExpr.firstChild?.type.id === BinOpExpr) {\n context.errors.push({\n text: 'Query parsing is ambiguous.',\n from: metricExpr.firstChild.from,\n to: metricExpr.firstChild?.to,\n });\n }\n\n handleExpression(expr, metricExpr, context);\n }\n\n return op;\n}\n\nconst operatorToOpName = binaryScalarDefs.reduce>((acc, def) => {\n acc[def.sign] = {\n id: def.id,\n comparison: def.comparison,\n };\n return acc;\n}, {});\n\n/**\n * Right now binary expressions can be represented in 2 way in visual query. 
As additional operation in case it is\n * just operation with scalar or it creates a binaryQuery when it's 2 queries.\n * @param expr\n * @param node\n * @param context\n */\nfunction handleBinary(expr: string, node: SyntaxNode, context: Context) {\n const visQuery = context.query;\n const left = node.firstChild!;\n const op = getString(expr, left.nextSibling);\n const binModifier = getBinaryModifier(expr, node.getChild(BinOpModifier));\n\n const right = node.lastChild!;\n\n const opDef = operatorToOpName[op];\n\n const leftNumber = getLastChildWithSelector(left, 'MetricExpr.LiteralExpr.Number');\n const rightNumber = getLastChildWithSelector(right, 'MetricExpr.LiteralExpr.Number');\n\n const rightBinary = right.getChild(BinOpExpr);\n\n if (leftNumber) {\n // TODO: this should be already handled in case parent is binary expression as it has to be added to parent\n // if query starts with a number that isn't handled now.\n } else {\n // If this is binary we don't really know if there is a query or just chained scalars. So\n // we have to traverse a bit deeper to know\n handleExpression(expr, left, context);\n }\n\n if (rightNumber) {\n visQuery.operations.push(makeBinOp(opDef, expr, right, !!binModifier?.isBool));\n } else if (rightBinary) {\n // Due to the way binary ops are parsed we can get a binary operation on the right that starts with a number which\n // is a factor for a current binary operation. So we have to add it as an operation now.\n const leftMostChild = getLeftMostChild(right);\n if (leftMostChild?.type.id === NumberLezer) {\n visQuery.operations.push(makeBinOp(opDef, expr, leftMostChild, !!binModifier?.isBool));\n }\n\n // If we added the first number literal as operation here we still can continue and handle the rest as the first\n // number will be just skipped.\n handleExpression(expr, right, context);\n } else {\n visQuery.binaryQueries = visQuery.binaryQueries || [];\n const binQuery: LokiVisualQueryBinary = {\n operator: op,\n query: {\n labels: [],\n operations: [],\n },\n };\n if (binModifier?.isMatcher) {\n binQuery.vectorMatchesType = binModifier.matchType;\n binQuery.vectorMatches = binModifier.matches;\n }\n visQuery.binaryQueries.push(binQuery);\n handleExpression(expr, right, {\n query: binQuery.query,\n errors: context.errors,\n });\n }\n}\n\nfunction getBinaryModifier(\n expr: string,\n node: SyntaxNode | null\n):\n | { isBool: true; isMatcher: false }\n | { isBool: boolean; isMatcher: true; matches: string; matchType: 'ignoring' | 'on' }\n | undefined {\n if (!node) {\n return undefined;\n }\n const matcher = node.getChild(OnOrIgnoringModifier);\n const boolMatcher = node.getChild(Bool);\n\n if (!matcher && boolMatcher) {\n return { isBool: true, isMatcher: false };\n } else {\n if (!matcher) {\n // Not sure what this could be, maybe should be an error.\n return undefined;\n }\n const labels = getString(expr, matcher.getChild(GroupingLabels)?.getChild(GroupingLabelList));\n return {\n isMatcher: true,\n isBool: !!boolMatcher,\n matches: labels,\n matchType: matcher.getChild(On) ? 'on' : 'ignoring',\n };\n }\n}\n\nfunction isIntervalVariableError(node: SyntaxNode) {\n return node?.parent?.type.id === Range;\n}\n\nexport function handleQuotes(string: string) {\n if (string[0] === `\"` && string[string.length - 1] === `\"`) {\n return string\n .substring(1, string.length - 1)\n .replace(/\\\\\"/g, '\"')\n .replace(/\\\\\\\\/g, '\\\\');\n }\n return string.replace(/`/g, '');\n}\n\n/**\n * Simple helper to traverse the syntax tree. 
Instead of node.getChild('foo')?.getChild('bar')?.getChild('baz') you\n * can write getLastChildWithSelector(node, 'foo.bar.baz')\n * @param node\n * @param selector\n */\nfunction getLastChildWithSelector(node: SyntaxNode, selector: string) {\n let child: SyntaxNode | null = node;\n const children = selector.split('.');\n for (const s of children) {\n child = child.getChild(s);\n if (!child) {\n return null;\n }\n }\n return child;\n}\n\n/**\n * Helper function to enrich the error text with information that the visual query builder doesn't support that LogQL feature\n * @param expr\n * @param node\n * @param error\n */\nfunction createNotSupportedError(expr: string, node: SyntaxNode, error: string) {\n const err = makeError(expr, node);\n err.text = `${error}: ${err.text}`;\n return err;\n}\n\nfunction isEmptyQuery(query: LokiVisualQuery) {\n if (query.labels.length === 0 && query.operations.length === 0) {\n return true;\n }\n return false;\n}\n\nfunction handleDropFilter(expr: string, node: SyntaxNode, context: Context): QueryBuilderOperation {\n const labels: string[] = [];\n let exploringNode = node.getChild(DropLabels);\n while (exploringNode) {\n const label = getString(expr, exploringNode.getChild(DropLabel));\n if (label) {\n labels.push(label);\n }\n exploringNode = exploringNode?.getChild(DropLabels);\n }\n labels.reverse();\n return {\n id: LokiOperationId.Drop,\n params: labels,\n };\n}\n\nfunction handleKeepFilter(expr: string, node: SyntaxNode, context: Context): QueryBuilderOperation {\n const labels: string[] = [];\n let exploringNode = node.getChild(KeepLabels);\n while (exploringNode) {\n const label = getString(expr, exploringNode.getChild(KeepLabel));\n if (label) {\n labels.push(label);\n }\n exploringNode = exploringNode?.getChild(KeepLabels);\n }\n labels.reverse();\n return {\n id: LokiOperationId.Keep,\n params: labels,\n };\n}\n","import { SyntaxNode, TreeCursor } from '@lezer/common';\n\nimport { QueryBuilderOperation, QueryBuilderOperationParamValue } from '@grafana/experimental';\n\n// Although 0 isn't explicitly provided in the @grafana/lezer-logql library as the error node ID, it does appear to be the ID of error nodes within lezer.\nexport const ErrorId = 0;\n\nexport function getLeftMostChild(cur: SyntaxNode): SyntaxNode {\n return cur.firstChild ? getLeftMostChild(cur.firstChild) : cur;\n}\n\nexport function makeError(expr: string, node: SyntaxNode) {\n return {\n text: getString(expr, node),\n // TODO: these are positions in the string with the replaced variables. This means they cannot be used to show the exact\n // placement of the error for the user. 
We need some translation table to positions before the variable\n // replace.\n from: node.from,\n to: node.to,\n parentType: node.parent?.name,\n };\n}\n\n// Taken from template_srv, but copied so to not mess with the regex.index which is manipulated in the service\n/*\n * This regex matches 3 types of variable reference with an optional format specifier\n * \\$(\\w+) $var1\n * \\[\\[([\\s\\S]+?)(?::(\\w+))?\\]\\] [[var2]] or [[var2:fmt2]]\n * \\${(\\w+)(?::(\\w+))?} ${var3} or ${var3:fmt3}\n */\nexport const variableRegex = /\\$(\\w+)|\\[\\[([\\s\\S]+?)(?::(\\w+))?\\]\\]|\\${(\\w+)(?:\\.([^:^\\}]+))?(?::([^\\}]+))?}/g;\n\n/**\n * As variables with $ are creating parsing errors, we first replace them with magic string that is parsable and at\n * the same time we can get the variable and its format back from it.\n * @param expr\n */\nexport function replaceVariables(expr: string) {\n return expr.replace(variableRegex, (match, var1, var2, fmt2, var3, fieldPath, fmt3) => {\n const fmt = fmt2 || fmt3;\n let variable = var1;\n let varType = '0';\n\n if (var2) {\n variable = var2;\n varType = '1';\n }\n\n if (var3) {\n variable = var3;\n varType = '2';\n }\n\n return `__V_${varType}__` + variable + '__V__' + (fmt ? '__F__' + fmt + '__F__' : '');\n });\n}\n\nconst varTypeFunc = [\n (v: string, f?: string) => `\\$${v}`,\n (v: string, f?: string) => `[[${v}${f ? `:${f}` : ''}]]`,\n (v: string, f?: string) => `\\$\\{${v}${f ? `:${f}` : ''}\\}`,\n];\n\n/**\n * Get back the text with variables in their original format.\n * @param expr\n */\nexport function returnVariables(expr: string) {\n return expr.replace(/__V_(\\d)__(.+?)__V__(?:__F__(\\w+)__F__)?/g, (match, type, v, f) => {\n return varTypeFunc[parseInt(type, 10)](v, f);\n });\n}\n\n/**\n * Get the actual string of the expression. That is not stored in the tree so we have to get the indexes from the node\n * and then based on that get it from the expression.\n * @param expr\n * @param node\n */\nexport function getString(expr: string, node: SyntaxNode | TreeCursor | null | undefined) {\n if (!node) {\n return '';\n }\n return returnVariables(expr.substring(node.from, node.to));\n}\n\n/**\n * Create simple scalar binary op object.\n * @param opDef - definition of the op to be created\n * @param expr\n * @param numberNode - the node for the scalar\n * @param hasBool - whether operation has a bool modifier. Is used only for ops for which it makes sense.\n */\nexport function makeBinOp(\n opDef: { id: string; comparison?: boolean },\n expr: string,\n numberNode: SyntaxNode,\n hasBool: boolean\n): QueryBuilderOperation {\n const params: QueryBuilderOperationParamValue[] = [parseFloat(getString(expr, numberNode))];\n if (opDef.comparison) {\n params.push(hasBool);\n }\n return {\n id: opDef.id,\n params,\n };\n}\n\n/**\n * Get all nodes with type in the tree. This traverses the tree so it is safe only when you know there shouldn't be\n * too much nesting but you just want to skip some of the wrappers. For example getting function args this way would\n * not be safe is it would also find arguments of nested functions.\n * @param expr\n * @param cur\n * @param type - can be string or number, some data-sources (loki) haven't migrated over to using numeric constants defined in the lezer parsing library (e.g. 
lezer-promql).\n * @todo Remove string type definition when all data-sources have migrated to numeric constants\n */\nexport function getAllByType(expr: string, cur: SyntaxNode, type: number | string): string[] {\n if (cur.type.id === type || cur.name === type) {\n return [getString(expr, cur)];\n }\n const values: string[] = [];\n let pos = 0;\n let child = cur.childAfter(pos);\n while (child) {\n values.push(...getAllByType(expr, child, type));\n pos = child.to;\n child = cur.childAfter(pos);\n }\n return values;\n}\n\n/**\n * There aren't any spaces in the metric names, so let's introduce a wildcard into the regex for each space to better facilitate a fuzzy search\n */\nexport const regexifyLabelValuesQueryString = (query: string) => {\n const queryArray = query.split(' ');\n return queryArray.map((query) => `${query}.*`).join('');\n};\n","export function isLogLineJSON(line: string): boolean {\n let parsed;\n try {\n parsed = JSON.parse(line);\n } catch (error) {}\n // The JSON parser should only be used for log lines that are valid serialized JSON objects.\n return typeof parsed === 'object';\n}\n\n// This matches:\n// first a label from start of the string or first white space, then any word chars until \"=\"\n// second either an empty quotes, or anything that starts with quote and ends with unescaped quote,\n// or any non whitespace chars that do not start with quote\nconst LOGFMT_REGEXP = /(?:^|\\s)([\\w\\(\\)\\[\\]\\{\\}]+)=(\"\"|(?:\".*?[^\\\\]\"|[^\"\\s]\\S*))/;\n\nexport function isLogLineLogfmt(line: string): boolean {\n return LOGFMT_REGEXP.test(line);\n}\n\nexport function isLogLinePacked(line: string): boolean {\n let parsed;\n try {\n parsed = JSON.parse(line);\n return parsed.hasOwnProperty('_entry');\n } catch (error) {\n return false;\n }\n}\n","import { DataFrame, FieldType, isValidGoDuration, Labels } from '@grafana/data';\n\nimport { isBytesString, processLabels } from './languageUtils';\nimport { isLogLineJSON, isLogLineLogfmt, isLogLinePacked } from './lineParser';\nimport { LabelType } from './types';\n\nexport function dataFrameHasLokiError(frame: DataFrame): boolean {\n const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values ?? [];\n return labelSets.some((labels) => labels.__error__ !== undefined);\n}\n\nexport function dataFrameHasLevelLabel(frame: DataFrame): boolean {\n const labelSets: Labels[] = frame.fields.find((f) => f.name === 'labels')?.values ?? [];\n return labelSets.some((labels) => labels.level !== undefined);\n}\n\nexport function extractLogParserFromDataFrame(frame: DataFrame): {\n hasLogfmt: boolean;\n hasJSON: boolean;\n hasPack: boolean;\n} {\n const lineField = frame.fields.find((field) => field.type === FieldType.string);\n if (lineField == null) {\n return { hasJSON: false, hasLogfmt: false, hasPack: false };\n }\n\n const logLines: string[] = lineField.values;\n\n let hasJSON = false;\n let hasLogfmt = false;\n let hasPack = false;\n\n logLines.forEach((line) => {\n if (isLogLineJSON(line)) {\n hasJSON = true;\n\n hasPack = isLogLinePacked(line);\n }\n if (isLogLineLogfmt(line)) {\n hasLogfmt = true;\n }\n });\n\n return { hasLogfmt, hasJSON, hasPack };\n}\n\nexport function extractLabelKeysFromDataFrame(frame: DataFrame, type: LabelType = LabelType.Indexed): string[] {\n const labelsArray: Array<{ [key: string]: string }> | undefined =\n frame?.fields?.find((field) => field.name === 'labels')?.values ?? 
[];\n const labelTypeArray: Array<{ [key: string]: string }> | undefined =\n frame?.fields?.find((field) => field.name === 'labelTypes')?.values ?? [];\n\n if (!labelsArray?.length) {\n return [];\n }\n\n // if there are no label types and type is LabelType.Indexed return all label keys\n if (!labelTypeArray?.length) {\n if (type === LabelType.Indexed) {\n const { keys: labelKeys } = processLabels(labelsArray);\n return labelKeys;\n }\n return [];\n }\n\n // If we have label types, we can return only label keys that match type\n let labelsSet = new Set();\n for (let i = 0; i < labelsArray.length; i++) {\n const labels = labelsArray[i];\n const labelsType = labelTypeArray[i];\n\n const allLabelKeys = Object.keys(labels).filter((key) => labelsType[key] === type);\n labelsSet = new Set([...labelsSet, ...allLabelKeys]);\n }\n\n return Array.from(labelsSet);\n}\n\nexport function extractUnwrapLabelKeysFromDataFrame(frame: DataFrame): string[] {\n const labelsArray: Array<{ [key: string]: string }> | undefined =\n frame?.fields?.find((field) => field.name === 'labels')?.values ?? [];\n\n if (!labelsArray?.length) {\n return [];\n }\n\n // We do this only for first label object, because we want to consider only labels that are present in all log lines\n // possibleUnwrapLabels are labels with 1. number value OR 2. value that is valid go duration OR 3. bytes string value\n const possibleUnwrapLabels = Object.keys(labelsArray[0]).filter((key) => {\n const value = labelsArray[0][key];\n if (!value) {\n return false;\n }\n return !isNaN(Number(value)) || isValidGoDuration(value) || isBytesString(value);\n });\n\n // Add only labels that are present in every line to unwrapLabels\n return possibleUnwrapLabels.filter((label) => labelsArray.every((obj) => obj[label]));\n}\n\nexport function extractHasErrorLabelFromDataFrame(frame: DataFrame): boolean {\n const labelField = frame.fields.find((field) => field.name === 'labels' && field.type === FieldType.other);\n if (labelField == null) {\n return false;\n }\n\n const labels: Array<{ [key: string]: string }> = labelField.values;\n return labels.some((label) => label['__error__']);\n}\n\nexport function extractLevelLikeLabelFromDataFrame(frame: DataFrame): string | null {\n const labelField = frame.fields.find((field) => field.name === 'labels' && field.type === FieldType.other);\n if (labelField == null) {\n return null;\n }\n\n // Depending on number of labels, this can be pretty heavy operation.\n // Let's just look at first 2 lines If needed, we can introduce more later.\n const labelsArray: Array<{ [key: string]: string }> = labelField.values.slice(0, 2);\n let levelLikeLabel: string | null = null;\n\n // Find first level-like label\n for (let labels of labelsArray) {\n const label = Object.keys(labels).find((label) => label === 'lvl' || label.includes('level'));\n if (label) {\n levelLikeLabel = label;\n break;\n }\n }\n return levelLikeLabel;\n}\n","import { CoreApp, DashboardLoadedEvent, DataQueryRequest, DataQueryResponse } from '@grafana/data';\nimport { QueryEditorMode } from '@grafana/experimental';\nimport { reportInteraction, config } from '@grafana/runtime';\n\nimport {\n REF_ID_STARTER_ANNOTATION,\n REF_ID_DATA_SAMPLES,\n REF_ID_STARTER_LOG_ROW_CONTEXT,\n REF_ID_STARTER_LOG_VOLUME,\n} from './datasource';\nimport pluginJson from './plugin.json';\nimport { getNormalizedLokiQuery, isLogsQuery, obfuscate } from './queryUtils';\nimport { variableRegex } from './querybuilder/parsingUtils';\nimport { LokiGroupedRequest, LokiQuery, LokiQueryType } 
from './types';\n\ntype LokiOnDashboardLoadedTrackingEvent = {\n grafana_version?: string;\n dashboard_id?: string;\n org_id?: number;\n\n /* The number of Loki queries present in the dashboard*/\n queries_count: number;\n\n /* The number of Loki logs queries present in the dashboard*/\n logs_queries_count: number;\n\n /* The number of Loki metric queries present in the dashboard*/\n metric_queries_count: number;\n\n /* The number of Loki instant queries present in the dashboard*/\n instant_queries_count: number;\n\n /* The number of Loki range queries present in the dashboard*/\n range_queries_count: number;\n\n /* The number of Loki queries created in builder mode present in the dashboard*/\n builder_mode_queries_count: number;\n\n /* The number of Loki queries created in code mode present in the dashboard*/\n code_mode_queries_count: number;\n\n /* The number of Loki queries with used template variables present in the dashboard*/\n queries_with_template_variables_count: number;\n\n /* The number of Loki queries with changed resolution present in the dashboard*/\n queries_with_changed_resolution_count: number;\n\n /* The number of Loki queries with changed line limit present in the dashboard*/\n queries_with_changed_line_limit_count: number;\n\n /* The number of Loki queries with changed legend present in the dashboard*/\n queries_with_changed_legend_count: number;\n};\n\nexport type LokiTrackingSettings = {\n predefinedOperations?: string;\n};\n\nexport const onDashboardLoadedHandler = ({\n payload: { dashboardId, orgId, grafanaVersion, queries },\n}: DashboardLoadedEvent) => {\n try {\n // We only want to track visible Loki queries\n const lokiQueries = queries[pluginJson.id]\n ?.filter((query) => !query.hide)\n ?.map((query) => getNormalizedLokiQuery(query));\n\n if (!lokiQueries?.length) {\n return;\n }\n\n const logsQueries = lokiQueries.filter((query) => isLogsQuery(query.expr));\n const metricQueries = lokiQueries.filter((query) => !isLogsQuery(query.expr));\n const instantQueries = lokiQueries.filter((query) => query.queryType === LokiQueryType.Instant);\n const rangeQueries = lokiQueries.filter((query) => query.queryType === LokiQueryType.Range);\n const builderModeQueries = lokiQueries.filter((query) => query.editorMode === QueryEditorMode.Builder);\n const codeModeQueries = lokiQueries.filter((query) => query.editorMode === QueryEditorMode.Code);\n const queriesWithTemplateVariables = lokiQueries.filter(isQueryWithTemplateVariables);\n const queriesWithChangedResolution = lokiQueries.filter(isQueryWithChangedResolution);\n const queriesWithChangedLineLimit = lokiQueries.filter(isQueryWithChangedLineLimit);\n const queriesWithChangedLegend = lokiQueries.filter(isQueryWithChangedLegend);\n\n const event: LokiOnDashboardLoadedTrackingEvent = {\n grafana_version: grafanaVersion,\n dashboard_id: dashboardId,\n org_id: orgId,\n queries_count: lokiQueries.length,\n logs_queries_count: logsQueries.length,\n metric_queries_count: metricQueries.length,\n instant_queries_count: instantQueries.length,\n range_queries_count: rangeQueries.length,\n builder_mode_queries_count: builderModeQueries.length,\n code_mode_queries_count: codeModeQueries.length,\n queries_with_template_variables_count: queriesWithTemplateVariables.length,\n queries_with_changed_resolution_count: queriesWithChangedResolution.length,\n queries_with_changed_line_limit_count: queriesWithChangedLineLimit.length,\n queries_with_changed_legend_count: queriesWithChangedLegend.length,\n };\n\n 
reportInteraction('grafana_loki_dashboard_loaded', event);\n } catch (error) {\n console.error('error in loki tracking handler', error);\n }\n};\n\nconst isQueryWithTemplateVariables = (query: LokiQuery): boolean => {\n return variableRegex.test(query.expr);\n};\n\nconst isQueryWithChangedResolution = (query: LokiQuery): boolean => {\n if (!query.resolution) {\n return false;\n }\n // 1 is the default resolution\n return query.resolution !== 1;\n};\n\nconst isQueryWithChangedLineLimit = (query: LokiQuery): boolean => {\n return query.maxLines !== null && query.maxLines !== undefined;\n};\n\nconst isQueryWithChangedLegend = (query: LokiQuery): boolean => {\n if (!query.legendFormat) {\n return false;\n }\n return query.legendFormat !== '';\n};\n\nconst shouldNotReportBasedOnRefId = (refId: string): boolean => {\n const starters = [REF_ID_STARTER_ANNOTATION, REF_ID_STARTER_LOG_ROW_CONTEXT, REF_ID_STARTER_LOG_VOLUME];\n\n if (refId === REF_ID_DATA_SAMPLES || starters.some((starter) => refId.startsWith(starter))) {\n return true;\n }\n return false;\n};\n\nconst calculateTotalBytes = (response: DataQueryResponse): number => {\n let totalBytes = 0;\n for (const frame of response.data) {\n const byteKey = frame.meta?.custom?.lokiQueryStatKey;\n if (byteKey) {\n totalBytes +=\n frame.meta?.stats?.find((stat: { displayName: string }) => stat.displayName === byteKey)?.value ?? 0;\n }\n }\n return totalBytes;\n};\n\nexport function trackQuery(\n response: DataQueryResponse,\n request: DataQueryRequest,\n startTime: Date,\n trackingSettings: LokiTrackingSettings = {},\n extraPayload: Record = {}\n): void {\n // We only want to track usage for these specific apps\n const { app, targets: queries } = request;\n\n if (app === CoreApp.Dashboard || app === CoreApp.PanelViewer) {\n return;\n }\n\n let totalBytes = calculateTotalBytes(response);\n\n for (const query of queries) {\n if (shouldNotReportBasedOnRefId(query.refId)) {\n return;\n }\n\n reportInteraction('grafana_loki_query_executed', {\n app,\n grafana_version: config.buildInfo.version,\n editor_mode: query.editorMode,\n has_data: response.data.some((frame) => frame.length > 0),\n has_error: response.error !== undefined,\n legend: query.legendFormat,\n line_limit: query.maxLines,\n obfuscated_query: obfuscate(query.expr),\n query_type: isLogsQuery(query.expr) ? 'logs' : 'metric',\n query_vector_type: query.queryType,\n resolution: query.resolution,\n simultaneously_executed_query_count: queries.filter((query) => !query.hide).length,\n simultaneously_hidden_query_count: queries.filter((query) => query.hide).length,\n time_range_from: request?.range?.from?.toISOString(),\n time_range_to: request?.range?.to?.toISOString(),\n time_taken: Date.now() - startTime.getTime(),\n bytes_processed: totalBytes,\n is_split: false,\n predefined_operations_applied: trackingSettings.predefinedOperations\n ? 
query.expr.includes(trackingSettings.predefinedOperations)\n : 'n/a',\n ...extraPayload,\n });\n }\n}\n\nexport function trackGroupedQueries(\n response: DataQueryResponse,\n groupedRequests: LokiGroupedRequest[],\n originalRequest: DataQueryRequest,\n startTime: Date,\n trackingSettings: LokiTrackingSettings = {}\n): void {\n const splittingPayload = {\n split_query_group_count: groupedRequests.length,\n split_query_largest_partition_size: Math.max(...groupedRequests.map(({ partition }) => partition.length)),\n split_query_total_request_count: groupedRequests.reduce((total, { partition }) => total + partition.length, 0),\n is_split: true,\n simultaneously_executed_query_count: originalRequest.targets.filter((query) => !query.hide).length,\n simultaneously_hidden_query_count: originalRequest.targets.filter((query) => query.hide).length,\n };\n\n for (const group of groupedRequests) {\n const split_query_partition_size = group.partition.length;\n trackQuery(response, group.request, startTime, trackingSettings, {\n ...splittingPayload,\n split_query_partition_size,\n });\n }\n}\n","export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;","import REGEX from './regex.js';\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && REGEX.test(uuid);\n}\n\nexport default validate;","import { LRParser } from '@lezer/lr';\n\n// This file was generated by lezer-generator. You probably shouldn't edit it.\nconst Json$1 = 1,\n Logfmt$1 = 2,\n Unpack$1 = 3,\n Pattern$1 = 4,\n Regexp$1 = 5,\n Unwrap$1 = 6,\n LabelFormat$1 = 7,\n LineFormat$1 = 8,\n LabelReplace$1 = 9,\n Vector$1 = 10,\n Offset$1 = 11,\n Bool$1 = 12,\n On$1 = 13,\n Ignoring$1 = 14,\n GroupLeft$1 = 15,\n GroupRight$1 = 16,\n Decolorize$1 = 17,\n Drop$1 = 18,\n Keep$1 = 19,\n By$1 = 20,\n Without$1 = 21,\n And$1 = 22,\n Or$1 = 23,\n Unless$1 = 24,\n Sum$1 = 25,\n Avg$1 = 26,\n Count$1 = 27,\n Max$1 = 28,\n Min$1 = 29,\n Stddev$1 = 30,\n Stdvar$1 = 31,\n Bottomk$1 = 32,\n Topk$1 = 33,\n Sort$1 = 34,\n Sort_Desc$1 = 35,\n MetricExpr$1 = 93;\n\nconst keywordTokens = {\n json: Json$1,\n logfmt: Logfmt$1,\n unpack: Unpack$1,\n pattern: Pattern$1,\n regexp: Regexp$1,\n label_format: LabelFormat$1,\n line_format: LineFormat$1,\n label_replace: LabelReplace$1,\n vector: Vector$1,\n offset: Offset$1,\n bool: Bool$1,\n on: On$1,\n ignoring: Ignoring$1,\n group_left: GroupLeft$1,\n group_right: GroupRight$1,\n unwrap: Unwrap$1,\n decolorize: Decolorize$1,\n drop: Drop$1,\n keep: Keep$1,\n};\n\nconst specializeIdentifier = (value) => {\n return keywordTokens[value.toLowerCase()] || -1;\n};\n\nconst contextualKeywordTokens = {\n by: By$1,\n without: Without$1,\n and: And$1,\n or: Or$1,\n unless: Unless$1,\n sum: Sum$1,\n avg: Avg$1,\n count: Count$1,\n max: Max$1,\n min: Min$1,\n stddev: Stddev$1,\n stdvar: Stdvar$1,\n bottomk: Bottomk$1,\n topk: Topk$1,\n sort: Sort$1,\n sort_desc: Sort_Desc$1,\n};\n\nconst extendIdentifier = (value) => {\n return contextualKeywordTokens[value.toLowerCase()] || -1;\n};\n\n// This file was generated by lezer-generator. 
You probably shouldn't edit it.\nconst spec_Identifier = {__proto__:null,ip:291, count_over_time:297, rate:299, rate_counter:301, bytes_over_time:303, bytes_rate:305, avg_over_time:307, sum_over_time:309, min_over_time:311, max_over_time:313, stddev_over_time:315, stdvar_over_time:317, quantile_over_time:319, first_over_time:321, last_over_time:323, absent_over_time:325, bytes:331, duration:333, duration_seconds:335};\nconst parser = LRParser.deserialize({\n version: 14,\n states: \"EtOYQPOOO#cQPO'#DUO$rQPO'#DTOYQPO'#DTOOQO'#E_'#E_O%PQPO'#E^OOQO'#Ez'#EzO%UQPO'#EyQ%aQPOOOOQO'#FY'#FYO&bQPO'#FYO&gQPO'#FZO&lQPO'#F[OOQO'#E]'#E]OOQO'#DS'#DSOOQO'#E`'#E`OOQO'#Ea'#EaOOQO'#Eb'#EbOOQO'#Ec'#EcOOQO'#Ed'#EdOOQO'#Ee'#EeOOQO'#Ef'#EfOOQO'#Eg'#EgOOQO'#Eh'#EhOOQO'#Ei'#EiOOQO'#Ej'#EjOOQO'#Ek'#EkOOQO'#El'#ElOOQO'#Em'#EmOOQO'#En'#EnO&qQPO'#DWOOQO'#DV'#DVO'PQPO,59pOOQO'#Dc'#DcO'XQPO'#DbO'aQPO'#DaO(zQPO'#D`O*eQPO'#D`OOQO'#D_'#D_O,gQPO,59oO-uQPO,59oO-|QPO,5:wO.TQPO,5:xO.`QPO'#EwO0kQPO,5;eO0rQPO,5;eO0wQPO,5;gO0wQPO,5;gO0wQPO,5;gO0wQPO,5;gO0wQPO,5;gO0wQPO,5;gOOQO,5;t,5;tOYQPO,5;uO3ZQPO,5;vO3`QPO,59rO#cQPO,59qOOQO1G/[1G/[OOQO'#Df'#DfO3eQPO,59|O5OQPO,59|OOQO'#Dg'#DgO5TQPO,59{OOQO,59{,59{O5]QPO'#DWO5zQPO'#DjO7kQPO'#DmO9XQPO'#DmOOQO'#Dm'#DmOOQO'#Dt'#DtOOQO'#Dr'#DrO+TQPO'#DrO9^QPO,59zO:wQPO'#EQO:|QPO'#EROOQO'#EU'#EUO;RQPO'#EVO;WQPO'#EYOOQO,59z,59zOOQO,59y,59yOOQO1G/Z1G/ZOOQO1G0c1G0cO;]QPO'#EoO.WQPO'#EoO;qQPO1G0dO;vQPO1G0dO;{QPO,5;cOkAN>kO!;pQPO<pQPO7+&mO!>wQPO7+&mO!?UQPO7+&mO!@^QPO7+&mO!@eQPO7+&mO!?]QPO'#E{\",\n stateData: \"!@r~O$ROStOS~OXZOY[OiUOjUOkUOlUOmUOnUOoUOpUOqUOrUOsUO!sXO#vYO#wYO$SPO$WRO$Y_O$Z`O$[aO$]bO$^cO$_dO$`eO$afO$bgO$chO$diO$ejO$fkO$glO$hmO~O{nO~O!OqO!QqO!WqO!XqOfwXgwXhwX!jwX!lwX!mwX!nwX!owX#vwX#wwX#xwX#ywX#zwX#{wX~O!]uO$PwX$XwX~P#hO$WzO~Od{Oe{O$W|O~Of!POg!OOh!PO!O!TO!j!TO!l!TO!m!TO!n!TO!o!TO#v!QO#w!QO#x!RO#y!RO#z!RO#{!SO~O!s!UO~O$W!VO~O$W!WO~O|!XO!O!XO!P!XO!Q!XO~O$T!YO$U!ZO~O}!]O$V!_O~Og!`Of!TXh!TX!O!TX!Q!TX!W!TX!X!TX!]!TX!j!TX!l!TX!m!TX!n!TX!o!TX#v!TX#w!TX#x!TX#y!TX#z!TX#{!TX$P!TX$X!TX$i!TX$T!TX~O!OqO!QqO!WqO!XqOf!SXg!SXh!SX!]!SX!j!SX!l!SX!m!SX!n!SX!o!SX#v!SX#w!SX#x!SX#y!SX#z!SX#{!SX$P!SX$X!SX$i!SX$T!SX~OP!dOQ!cOR!fOS!eOT!eOV!lOW!kOa!mOb!nOc!oO{!bO$W!iO~O!OqO!QqO!WqO!XqOfwagwahwa!jwa!lwa!mwa!nwa!owa#vwa#wwa#xwa#ywa#zwa#{wa~O!]uO$Pwa$Xwa~P+]OfvXgvXhvX!OvX!jvX!lvX!mvX!nvX!ovX#vvX#wvX#xvX#yvX#zvX#{vX~O$X!rO~P,tO$X!sO~P,tO!s!wO$SPO$W!uO~O$W!xO~OXZOY[OiUOjUOkUOlUOmUOnUOoUOpUOqUOrUOsUO#vYO#wYO$SPO$WRO$Y_O$Z`O$[aO$]bO$^cO$_dO$`eO$afO$bgO$chO$diO$ejO$fkO$glO$hmO~O!s!zO~P.eO$W!{O~O[#OO]!|O^!|OX#pPY#pPi#pPj#pPk#pPl#pPm#pPn#pPo#pPp#pPq#pPr#pPs#pP!s#pP#v#pP#w#pP$S#pP$W#pP$Y#pP$Z#pP$[#pP$]#pP$^#pP$_#pP$`#pP$a#pP$b#pP$c#pP$d#pP$e#pP$f#pP$g#pP$h#pP~O!s#WO~O}#XO~Og#ZOf!Uah!Ua!O!Ua!Q!Ua!W!Ua!X!Ua!]!Ua!j!Ua!l!Ua!m!Ua!n!Ua!o!Ua#v!Ua#w!Ua#x!Ua#y!Ua#z!Ua#{!Ua$P!Ua$X!Ua$i!Ua$T!Ua~O$W#[O~O}#]O$V!_O~O|#`O!O#`O!P!XO!Q!XO!j#aO!l#aO!m#aO!n#aO!o#aO~O{#dO!`#bOf!^Xg!^Xh!^X!O!^X!Q!^X!W!^X!X!^X!]!^X!j!^X!l!^X!m!^X!n!^X!o!^X#v!^X#w!^X#x!^X#y!^X#z!^X#{!^X$P!^X$X!^X$i!^X$T!^X~O{#dOf!aXg!aXh!aX!O!aX!Q!aX!W!aX!X!aX!]!aX!j!aX!l!aX!m!aX!n!aX!o!aX#v!aX#w!aX#x!aX#y!aX#z!aX#{!aX$P!aX$X!aX$i!aX$T!aX~O}#hO~Of#jOg#kO$T#jOh!Sa!O!Sa!Q!Sa!W!Sa!X!Sa!]!Sa!j!Sa!l!Sa!m!Sa!n!Sa!o!Sa#v!Sa#w!Sa#x!Sa#y!Sa#z!Sa#{!Sa$P!Sa$X!Sa$i!Sa~O}#lO~O{#mO~O{#pO~O{#tO~O!OqO!QqO!WqO!XqO!]#xO$i#zO~O$X$PO~O$T$QO~O{$RO$X$TO~O$X$UO~P,tOf#|Xg#|Xh#|X!O#|X!j#|X!l#|X!m#|X!n#|X!o#|X#v#|X#w#|X#x#|X#y#|X#z#|X#{#|X$X#|X~O$T$VO~P<[O!s$XO~P.eO$W$YO~OX#pXY#pXi#pXj#pXk#pXl#pXm#pXn#pXo#pXp#pXq#pXr#pXs#pX!s#pX#v#pX#w#pX$S#pX$W#pX$Y#pX$Z#pX$[#pX$]#pX$^#pX$_#pX$`#pX$a#pX$b#pX$c#pX$d#pX$e#pX$f#p
X$g#pX$h#pX~O_$[O`$[O~P=sO]!|O^!|O~P=sO$T$dO~P,tO$X$eO~O}$gO~Og$hOf![Xh![X!O![X!Q![X!W![X!X![X!]![X!j![X!l![X!m![X!n![X!o![X#v![X#w![X#x![X#y![X#z![X#{![X$P![X$X![X$i![X$T![X~O$W$iO~O}#XO!k$kO!q$lO!s$mO$V!_O~O!k$kO!q$lO!s$mO~O{#dO!`$nOf!^ag!^ah!^a!O!^a!Q!^a!W!^a!X!^a!]!^a!j!^a!l!^a!m!^a!n!^a!o!^a#v!^a#w!^a#x!^a#y!^a#z!^a#{!^a$P!^a$X!^a$i!^a$T!^a~O|$pOf!dXg!dXh!dX!O!dX!Q!dX!W!dX!X!dX!]!dX!j!dX!l!dX!m!dX!n!dX!o!dX#v!dX#w!dX#x!dX#y!dX#z!dX#{!dX$P!dX$T!dX$X!dX$i!dX~O$T$qOf!eag!eah!ea!O!ea!Q!ea!W!ea!X!ea!]!ea!j!ea!l!ea!m!ea!n!ea!o!ea#v!ea#w!ea#x!ea#y!ea#z!ea#{!ea$P!ea$X!ea$i!ea~O$T$qOf!bag!bah!ba!O!ba!Q!ba!W!ba!X!ba!]!ba!j!ba!l!ba!m!ba!n!ba!o!ba#v!ba#w!ba#x!ba#y!ba#z!ba#{!ba$P!ba$X!ba$i!ba~Of#jOg#kO$T#jO$X$rO~O|$tO~O$T$uOf!uag!uah!ua!O!ua!Q!ua!W!ua!X!ua!]!ua!j!ua!l!ua!m!ua!n!ua!o!ua#v!ua#w!ua#x!ua#y!ua#z!ua#{!ua$P!ua$X!ua$i!ua~O|!XO!O!XO!P!XO!Q!XOf!{Xg!{Xh!{X!W!{X!X!{X!]!{X!j!{X!l!{X!m!{X!n!{X!o!{X#v!{X#w!{X#x!{X#y!{X#z!{X#{!{X$P!{X$T!{X$X!{X$i!{X~O$T$vOf!yag!yah!ya!O!ya!Q!ya!W!ya!X!ya!]!ya!j!ya!l!ya!m!ya!n!ya!o!ya#v!ya#w!ya#x!ya#y!ya#z!ya#{!ya$P!ya$X!ya$i!ya~O|!XO!O!XO!P!XO!Q!XOf#OXg#OXh#OX!W#OX!X#OX!]#OX!j#OX!l#OX!m#OX!n#OX!o#OX#v#OX#w#OX#x#OX#y#OX#z#OX#{#OX$P#OX$T#OX$X#OX$i#OX~O$T$wOf!|ag!|ah!|a!O!|a!Q!|a!W!|a!X!|a!]!|a!j!|a!l!|a!m!|a!n!|a!o!|a#v!|a#w!|a#x!|a#y!|a#z!|a#{!|a$P!|a$X!|a$i!|a~OU$xO~P*eO!k${O~O!]$|O$i#zO~O!OqO!QqO!WqO!XqO!]#xO~OZ%OO$X#ca~P!$YO$X%TO~P;]O$X%UO~Od{Oe{Of#Qqg#Qqh#Qq!O#Qq!j#Qq!l#Qq!m#Qq!n#Qq!o#Qq#v#Qq#w#Qq#x#Qq#y#Qq#z#Qq#{#Qq$P#Qq$X#Qq$T#Qq~O$T%XO$X%YO~Od{Oe{Of#mqg#mqh#mq!O#mq!j#mq!l#mq!m#mq!n#mq!o#mq#v#mq#w#mq#x#mq#y#mq#z#mq#{#mq$P#mq$X#mq$T#mq~O$X%ZO~P,tO$T%]O~P<[O#u%^O$X%aO~OX#paY#pai#paj#pak#pal#pam#pan#pao#pap#paq#par#pas#pa!s#pa#v#pa#w#pa$S#pa$Y#pa$Z#pa$[#pa$]#pa$^#pa$_#pa$`#pa$a#pa$b#pa$c#pa$d#pa$e#pa$f#pa$g#pa$h#pa~O$W$YO~P!(cO_%cO`%cO$W#pa~P!(cOf!POh!PO!O!TO!j!TO!l!TO!m!TO!n!TO!o!TO#v!QO#w!QO#x#oq#y#oq#z#oq#{#oq$P#oq$X#oq~Og#oq~P!*}Of#oqg#oqh#oq~P!+TOg!OO~P!*}O$P#oq$X#oq~P%aOf#oqg#oqh#oq!O#oq!j#oq!l#oq!m#oq!n#oq!o#oq#x#oq#y#oq#z#oq#{#oq~O#v!QO#w!QO$P#oq$X#oq~P!,xO}%dO~O$X%eO~O}%gO~O$W%hO~O$T$qOf!eig!eih!ei!O!ei!Q!ei!W!ei!X!ei!]!ei!j!ei!l!ei!m!ei!n!ei!o!ei#v!ei#w!ei#x!ei#y!ei#z!ei#{!ei$P!ei$X!ei$i!ei~O}%iO~O{#dO~Of#jO$T#jOg!fih!fi!O!fi!Q!fi!W!fi!X!fi!]!fi!j!fi!l!fi!m!fi!n!fi!o!fi#v!fi#w!fi#x!fi#y!fi#z!fi#{!fi$P!fi$X!fi$i!fi~O{%kO}%kO~O{%pO$k%rO$l%sO$m%tO~OZ%OO$X#ci~O$j%vO~O$X#ci~P!$YO!k%yO~O!]$|O$X#ci~O$X%{O~P;]O!]$|O$X%{O$i#zO~O$X%}O~O{&OO~O$X&PO~P,tO$T&RO$X&SO~O$W$YOX#piY#pii#pij#pik#pil#pim#pin#pio#pip#piq#pir#pis#pi!s#pi#v#pi#w#pi$S#pi$Y#pi$Z#pi$[#pi$]#pi$^#pi$_#pi$`#pi$a#pi$b#pi$c#pi$d#pi$e#pi$f#pi$g#pi$h#pi~O$T&UO~O$X&VO~O}&WO~O$W&XO~Of#jOg#kO$T#jO!]#fi$i#fi$X#fi~O!]$|O$X#cq~O$X#cq~P!$YOZ%OO!]&[O$X#cq~Od{Oe{Of#Q!Rg#Q!Rh#Q!R!O#Q!R!j#Q!R!l#Q!R!m#Q!R!n#Q!R!o#Q!R#v#Q!R#w#Q!R#x#Q!R#y#Q!R#z#Q!R#{#Q!R$P#Q!R$X#Q!R$T#Q!R~Od{Oe{Of#m!Rg#m!Rh#m!R!O#m!R!j#m!R!l#m!R!m#m!R!n#m!R!o#m!R#v#m!R#w#m!R#x#m!R#y#m!R#z#m!R#{#m!R$P#m!R$X#m!R$T#m!R~O$X&_O~P,tO#u%^O$X&aO~O}&bO~O$X&cO~O{&dO~O!]$|O$X#cy~OZ%OO$X#cy~OU$xO~O!]&[O$X#cy~O$T&gO~O$X&hO~O!]$|O$X#c!R~O}&jO~O$T&kO~O}&lO~O$X&mO~OP!dOQ!cOR!fOS!eOT!eOV&nOW!kOa!mOb!nOc!oO{!bO$W!iO~O!]&oO$Twa~P+]O!]&oO$TwX~P#hOf&yOh&yO!O&}O!j&}O!l&}O!m&}O!n&}O!o&}O#v&zO#w&zO#x#oq#y#oq#z#oq#{#oq$T#oq~Og#oq~P!=oOf#oqg#oqh#oq~P!=uOg&xO~P!=oOf&yOg&xOh&yO!O&}O!j&}O!l&}O!m&}O!n&}O!o&}O#v&zO#w&zO#x&{O#y&{O#z&{O#{&|O~O$T#oq~P!?]O#v&zO#w&zO$T#oq~P!,xO\",\n goto: 
\"1P$PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP$Q%P%h&W&ZPPPPPP&r'U'f't(VPP(f(n(wP)Q)VP)Q)Q)Y)c)Q)k)|)|*VPPPPPP*VP)|P)Q)Q*`*f)Q)Q*m*p)Q*v*y+P+r,X,n,n,n,n,n,n,n,n,n,n,n,n,n,n,n-T-^-q-}.g.j.j.j.m.|+r/P+r/f0[0m0v0yPPPPPPP+r+r+r[WOR|!{$V%]Q$^#PQ$_#QS$`#R&tQ$a#SQ$b#TQ$c#UQ'O&rQ'P&sQ'Q&uQ'R&vQ'S&wR'T!Vt^O|!V!{#P#Q#R#S#T#U$V%]&r&s&t&u&v&wRxRjQOR|!V!{#P#Q#R#S#T#U$V%]S!tz$QQ#}!u]&q&r&s&t&u&v&wRpPQoP^!hu!i#j#k#x$|&oQ#Y!YS#q!n$vT#u!o$wQwQQ#y!tQ$}#|Q%R#}Q%z%QR&p&q[vQ!t#|#}%Q&q]!qw#y$}%R%z&pitQw!t#y#|#}$}%Q%R%z&p&qhsQw!t#y#|#}$}%Q%R%z&p&qR!atkrQtw!t#y#|#}$}%Q%R%z&p&qQ!^rV#^!`#Z$hW![r!`#Z$hR$j#`Q#_!`Q$f#ZR%f$hV!pu#x&oR#c!cQ#f!cQ#g!dR$o#cU#e!c!d#cR%j$qU!ju#x&oQ#i!iQ$r#jQ$s#kR%w$|_!hu!i#j#k#x$|&o_!gu!i#j#k#x$|&oQ#o!lR&i&nS#n!l&nR%l$uR#s!nQ#r!nR%m$vR#w!oQ#v!oR%n$wj^O#P#Q#R#S#T#U&r&s&t&u&v&wQyRQ!y|Q#V!VQ$W!{Q%[$VR&Q%]w]OR|!V!{#P#Q#R#S#T#U$V%]&r&s&t&u&v&wwTOR|!V!{#P#Q#R#S#T#U$V%]&r&s&t&u&v&wwSOR|!V!{#P#Q#R#S#T#U$V%]&r&s&t&u&v&wQ!vzQ$O!uR%W$QS#|!t#}W$z#y#{%R%SQ%u$yQ%|%TR&Z%{Q%Q#|Q%u$zQ&]%|R&e&ZQ#{!tS$y#y%RQ%P#|Q%S#}S%x$}%QS&Y%z%|R&f&]R%q$xR%o$xQ}VQ%V$PQ%Z$UQ&^%}R&_&PR$S!xwVOR|!V!{#P#Q#R#S#T#U$V%]&r&s&t&u&v&wQ#P!OQ#Q!PQ#R!QQ#S!RQ#T!SQ#U!TQ&r&xQ&s&yQ&t&zQ&u&{Q&v&|R&w&}h!}!O!P!Q!R!S!T&x&y&z&{&|&}R$]#OQ$Z!|Q%b$[R&T%cR%_$YQ%`$YR&`&R\",\n nodeNames: \"⚠ Json Logfmt Unpack Pattern Regexp Unwrap LabelFormat LineFormat LabelReplace Vector Offset Bool On Ignoring GroupLeft GroupRight Decolorize Drop Keep By Without And Or Unless Sum Avg Count Max Min Stddev Stdvar Bottomk Topk Sort Sort_Desc LineComment LogQL Expr LogExpr Selector Matchers Matcher Identifier Eq String Neq Re Nre PipelineExpr PipelineStage LineFilters LineFilter Filter PipeExact PipeMatch FilterOp Ip OrFilter Pipe LogfmtParser LogfmtParserFlags ParserFlag LabelParser JsonExpressionParser LabelExtractionExpressionList LabelExtractionExpression LogfmtExpressionParser LabelFilter IpLabelFilter UnitFilter DurationFilter Gtr Duration Gte Lss Lte Eql BytesFilter Bytes NumberFilter Number LineFormatExpr LabelFormatExpr LabelsFormat LabelFormatMatcher DecolorizeExpr DropLabelsExpr DropLabels DropLabel KeepLabelsExpr KeepLabels KeepLabel MetricExpr RangeAggregationExpr RangeOp CountOverTime Rate RateCounter BytesOverTime BytesRate AvgOverTime SumOverTime MinOverTime MaxOverTime StddevOverTime StdvarOverTime QuantileOverTime FirstOverTime LastOverTime AbsentOverTime LogRangeExpr Range OffsetExpr UnwrapExpr ConvOp BytesConv DurationConv DurationSecondsConv Grouping Labels VectorAggregationExpr VectorOp BinOpExpr BinOpModifier OnOrIgnoringModifier GroupingLabels GroupingLabelList GroupingLabel LabelName Add Sub Mul Div Mod Pow LiteralExpr LabelReplaceExpr VectorExpr\",\n maxTerm: 167,\n skippedNodes: [0,36],\n repeatNodeCount: 0,\n tokenData: 
\"5b~RvX^#ipq#iqr$^rs$qst%cuv%nxy%syz%xz{%}{|&S|}&X}!O&^!O!P(l!P!Q)l!Q!R)q!R![+X![!]2X!^!_2m!_!`2z!`!a3a!c!}3n!}#O4U#P#Q4Z#Q#R4`#R#S3n#S#T4e#T#o3n#o#p4q#p#q4v#q#r5]#y#z#i$f$g#i#BY#BZ#i$IS$I_#i$I|$JO#i$JT$JU#i$KV$KW#i&FU&FV#i~#nY$R~X^#ipq#i#y#z#i$f$g#i#BY#BZ#i$IS$I_#i$I|$JO#i$JT$JU#i$KV$KW#i&FU&FV#i~$aQ!_!`$g#r#s$l~$lO!O~~$qO!Q~~$tUOY$qZr$qrs%Ws#O$q#O#P%]#P~$q~%]O}~~%`PO~$q~%hQt~OY%cZ~%c~%sO#z~~%xO$W~~%}O$X~~&SO#x~~&XO#v~~&^O$T~~&cP#w~}!O&f~&iQ#_#`&o#g#h'|~&rP#X#Y&u~&xP#X#Y&{~'OP#d#e'R~'UP}!O'X~'[P#X#Y'_~'bP#a#b'e~'hP#d#e'k~'nP#h#i'q~'tP#m#n'w~'|O!`~~(PP#h#i(S~(VP#f#g(Y~(]P#]#^(`~(cP#V#W(f~(iP#h#i'w~(oP!Q![(r~(wR!s~!Q![(r!g!h)Q#X#Y)Q~)TR{|)^}!O)^!Q![)d~)aP!Q![)d~)iP!s~!Q![)d~)qO#y~~)ve!s~!O!P(r!Q![+X!g!h,l!i!j-Z!m!n-Z!o!p-Z!r!s-Z!v!w-Z#U#V-O#W#X-d#X#Y/f#Z#[/x#[#]-{#_#`/x#a#b0R#d#e/x#g#h/T#h#i/x#k#l0d#l#m1m#m#n1O~+^d!s~!O!P(r!Q![+X!g!h,l!i!j-Z!m!n-Z!o!p-Z!r!s-Z!v!w-Z#U#V-O#W#X-d#X#Y/f#Z#[/x#[#]-{#_#`/x#a#b0R#d#e/x#g#h/T#h#i/x#k#l0d#m#n1O~,oT{|)^}!O)^!Q![)d!d!e-O#]#^-T~-TO!q~~-WP#U#V-O~-^Q!d!e-O#]#^-T~-iP!k~!Q![-l~-oS!Q![-l#[#]-{#a#b.a#g#h/T~.QP!k~!Q![.T~.WR!Q![.T#a#b.a#g#h/T~.fQ!k~!Q![.l#g#h/O~.oR!Q![.l#a#b.x#g#h/T~.{P#g#h/O~/TO!k~~/YP!k~!Q![/]~/`Q!Q![/]#a#b.x~/iT{|)^}!O)^!Q![)d#U#V-O#]#^-T~/{Q#U#V-O#]#^-T~0WS!k~!Q![.l#U#V-O#]#^-T#g#h/O~0iP!k~!Q![0l~0oT!Q![0l#W#X-d#[#]-{#a#b.a#g#h/T~1TP!k~!Q![1W~1ZU!Q![1W#W#X-d#[#]-{#a#b.a#g#h/T#k#l0d~1pR!Q![1y!c!i1y#T#Z1y~2OR!s~!Q![1y!c!i1y#T#Z1yP2^T{P!Q![2X![!]2X!c!}2X#R#S2X#T#o2X~2rP!m~!_!`2u~2zO!n~~3PQ|~!_!`3V#r#s3[~3[O!o~~3aO!P~~3fP!j~!_!`3i~3nO!l~R3uT{P#uQ!Q![3n![!]2X!c!}3n#R#S3n#T#o3n~4ZO$i~~4`O$j~~4eO#{~~4hRO#S4e#S#T%W#T~4e~4vO$S~~4{Q!]~!_!`5R#r#s5W~5WO!W~~5]O!X~~5bO$U~\",\n tokenizers: [0, 1],\n topRules: {\"LogQL\":[0,37]},\n specialized: [{term: 43, get: (value, stack) => (specializeIdentifier(value) << 1)},{term: 43, get: (value, stack) => (extendIdentifier(value) << 1) | 1},{term: 43, get: value => spec_Identifier[value] || -1}],\n tokenPrec: 0\n});\n// This file was generated by lezer-generator. 
You probably shouldn't edit it.\nconst Json = 1,\n Logfmt = 2,\n Unpack = 3,\n Pattern = 4,\n Regexp = 5,\n Unwrap = 6,\n LabelFormat = 7,\n LineFormat = 8,\n LabelReplace = 9,\n Vector = 10,\n Offset = 11,\n Bool = 12,\n On = 13,\n Ignoring = 14,\n GroupLeft = 15,\n GroupRight = 16,\n Decolorize = 17,\n Drop = 18,\n Keep = 19,\n By = 20,\n Without = 21,\n And = 22,\n Or = 23,\n Unless = 24,\n Sum = 25,\n Avg = 26,\n Count = 27,\n Max = 28,\n Min = 29,\n Stddev = 30,\n Stdvar = 31,\n Bottomk = 32,\n Topk = 33,\n Sort = 34,\n Sort_Desc = 35,\n LineComment = 36,\n LogQL = 37,\n Expr = 38,\n LogExpr = 39,\n Selector = 40,\n Matchers = 41,\n Matcher = 42,\n Identifier = 43,\n Eq = 44,\n String = 45,\n Neq = 46,\n Re = 47,\n Nre = 48,\n PipelineExpr = 49,\n PipelineStage = 50,\n LineFilters = 51,\n LineFilter = 52,\n Filter = 53,\n PipeExact = 54,\n PipeMatch = 55,\n FilterOp = 56,\n Ip = 57,\n OrFilter = 58,\n Pipe = 59,\n LogfmtParser = 60,\n LogfmtParserFlags = 61,\n ParserFlag = 62,\n LabelParser = 63,\n JsonExpressionParser = 64,\n LabelExtractionExpressionList = 65,\n LabelExtractionExpression = 66,\n LogfmtExpressionParser = 67,\n LabelFilter = 68,\n IpLabelFilter = 69,\n UnitFilter = 70,\n DurationFilter = 71,\n Gtr = 72,\n Duration = 73,\n Gte = 74,\n Lss = 75,\n Lte = 76,\n Eql = 77,\n BytesFilter = 78,\n Bytes = 79,\n NumberFilter = 80,\n Number = 81,\n LineFormatExpr = 82,\n LabelFormatExpr = 83,\n LabelsFormat = 84,\n LabelFormatMatcher = 85,\n DecolorizeExpr = 86,\n DropLabelsExpr = 87,\n DropLabels = 88,\n DropLabel = 89,\n KeepLabelsExpr = 90,\n KeepLabels = 91,\n KeepLabel = 92,\n MetricExpr = 93,\n RangeAggregationExpr = 94,\n RangeOp = 95,\n CountOverTime = 96,\n Rate = 97,\n RateCounter = 98,\n BytesOverTime = 99,\n BytesRate = 100,\n AvgOverTime = 101,\n SumOverTime = 102,\n MinOverTime = 103,\n MaxOverTime = 104,\n StddevOverTime = 105,\n StdvarOverTime = 106,\n QuantileOverTime = 107,\n FirstOverTime = 108,\n LastOverTime = 109,\n AbsentOverTime = 110,\n LogRangeExpr = 111,\n Range = 112,\n OffsetExpr = 113,\n UnwrapExpr = 114,\n ConvOp = 115,\n BytesConv = 116,\n DurationConv = 117,\n DurationSecondsConv = 118,\n Grouping = 119,\n Labels = 120,\n VectorAggregationExpr = 121,\n VectorOp = 122,\n BinOpExpr = 123,\n BinOpModifier = 124,\n OnOrIgnoringModifier = 125,\n GroupingLabels = 126,\n GroupingLabelList = 127,\n GroupingLabel = 128,\n LabelName = 129,\n Add = 130,\n Sub = 131,\n Mul = 132,\n Div = 133,\n Mod = 134,\n Pow = 135,\n LiteralExpr = 136,\n LabelReplaceExpr = 137,\n VectorExpr = 138;\n\nfunction getNodeFromQuery(query, nodeType) {\n const nodes = [];\n const tree = parser.parse(query);\n tree.iterate({\n enter: (node) => {\n if (nodeType === undefined || nodeType === node.type.id) {\n nodes.push(node.node);\n }\n },\n });\n return nodes[0];\n}\n\nfunction isLogsQuery(query) {\n if (getNodeFromQuery(query, MetricExpr$1)) {\n return false;\n }\n return true;\n}\n\nfunction indent(level) {\n return ' '.repeat(level);\n}\n\nfunction indentMultiline(block, level) {\n const lines = block.split('\\n');\n return lines.map((line) => indent(level) + line).join('\\n');\n}\n\nfunction trimMultiline(block) {\n const lines = block.split('\\n');\n return lines.map((line) => line.trimEnd()).join('\\n');\n}\n\nfunction needsBrackets(node, queryType) {\n const childNodeIsSame = node.firstChild?.type.id === queryType;\n let addBrackets = false;\n\n if (node.firstChild && childNodeIsSame) {\n addBrackets = true;\n node = node.firstChild;\n }\n\n return { addBrackets, newNode: 
node };\n}\n\nfunction iterateNode(node, lookingFor) {\n const nodes = [];\n let child = node.firstChild;\n\n while (child) {\n if (lookingFor.includes(child.type.id)) {\n nodes.push(child);\n }\n\n nodes.push(...iterateNode(child, lookingFor));\n child = child.nextSibling;\n }\n\n return nodes;\n}\n\nfunction buildResponse(pipelineType, lastPipelineType, formattedNode) {\n if (lastPipelineType === pipelineType) {\n return ` ${formattedNode}`;\n }\n\n return `\\n${indent(1)}${formattedNode}`;\n}\n\nfunction trimEnd(input, charactersToTrim) {\n let endIndex = input.length - 1;\n while (endIndex >= 0 && charactersToTrim.includes(input[endIndex])) {\n endIndex--;\n }\n return input.substring(0, endIndex + 1);\n}\n\nconst formatLogExpr = (node, query) => {\n const { addBrackets, newNode } = needsBrackets(node, LogExpr);\n node = newNode;\n\n const tree = parser.parse(query.substring(node.from, node.to));\n let formatted = '';\n\n tree.iterate({\n enter: (ref) => {\n const node = ref.node;\n\n switch (node.type.id) {\n case Selector:\n formatted += formatSelector(node, query);\n break;\n\n case PipelineExpr:\n node.parent?.type.id !== PipelineExpr && (formatted += formatPipelineExpr(node, query));\n break;\n }\n },\n });\n\n return addBrackets ? '(' + formatted + ')' : formatted;\n};\n\nfunction formatSelector(node, query) {\n const selector = query.substring(node.from, node.to);\n const subtree = parser.parse(selector);\n const labelNodes = [];\n let response = '';\n\n subtree.iterate({\n enter: (ref) => {\n const node = ref.node;\n if (node.type.id === Matcher) {\n labelNodes.push(node);\n }\n },\n });\n\n labelNodes.sort((a, b) => {\n const labelNodeA = a.getChild(Identifier);\n const labelNodeB = b.getChild(Identifier);\n\n const labelValueA = labelNodeA && query.substring(labelNodeA.from, labelNodeA.to);\n const labelValueB = labelNodeB && query.substring(labelNodeB.from, labelNodeB.to);\n\n if (!labelValueA || !labelValueB) {\n return 0;\n }\n\n if (labelValueA < labelValueB) {\n return -1;\n }\n\n if (labelValueA > labelValueB) {\n return 1;\n }\n\n return 0;\n });\n\n labelNodes.forEach((node) => {\n const labelNode = node.getChild(Identifier);\n const operatorNode = labelNode ? labelNode.nextSibling : null;\n const valueNode = node.getChild(String);\n\n const label = labelNode ? query.substring(labelNode.from, labelNode.to) : null;\n const operator = operatorNode ? query.substring(operatorNode.from, operatorNode.to) : null;\n const value = valueNode ? 
query.substring(valueNode.from, valueNode.to) : null;\n\n response += `${label}${operator}${value}, `;\n });\n\n return '{' + trimEnd(response, ', ') + '}';\n}\n\nfunction formatPipelineExpr(node, query) {\n const pipelineExprNodes = [\n LineFilter,\n LabelParser,\n LogfmtParser,\n LabelFilter,\n JsonExpressionParser,\n LineFormatExpr,\n LabelFormatExpr,\n DecolorizeExpr,\n ];\n let lastPipelineType;\n let response = '';\n\n iterateNode(node, pipelineExprNodes).forEach((node) => {\n switch (node.type.id) {\n case LineFilter:\n response += buildResponse(LineFilter, lastPipelineType, formatLineFilter(node, query));\n lastPipelineType = LineFilter;\n break;\n\n case LabelParser:\n response += buildResponse(LabelParser, lastPipelineType, formatLabelParser(node, query));\n lastPipelineType = LabelParser;\n break;\n\n case LogfmtParser:\n response += buildResponse(LogfmtParser, lastPipelineType, formatLabelParser(node, query));\n lastPipelineType = LogfmtParser;\n break;\n\n case JsonExpressionParser:\n response += buildResponse(JsonExpressionParser, lastPipelineType, formatJsonExpressionParser(node, query));\n lastPipelineType = JsonExpressionParser;\n break;\n\n case LabelFilter:\n response += buildResponse(LabelFilter, lastPipelineType, formatLabelFilter(node, query));\n lastPipelineType = LabelFilter;\n break;\n\n case LineFormatExpr:\n response += buildResponse(LineFormatExpr, lastPipelineType, formatLineFormatExpr(node, query));\n lastPipelineType = LineFormatExpr;\n break;\n\n case LabelFormatExpr:\n response += buildResponse(LabelFormatExpr, lastPipelineType, formatLabelFormatExpr(node, query));\n lastPipelineType = LabelFormatExpr;\n break;\n\n case DecolorizeExpr:\n response += buildResponse(DecolorizeExpr, lastPipelineType, formatDecolorizeExpr());\n lastPipelineType = DecolorizeExpr;\n break;\n }\n });\n\n return response;\n}\n\nfunction formatLineFilter(node, query) {\n const filterNode = node.getChild(Filter);\n const filterOperationNode = node.getChild(FilterOp);\n const stringNode = node.getChild(String);\n\n const filter = filterNode && query.substring(filterNode.from, filterNode.to);\n const string = stringNode && query.substring(stringNode.from, stringNode.to);\n\n if (filterOperationNode) {\n return `${filter} ip(${string})`;\n }\n return `${filter} ${string}`;\n}\n\nfunction formatLabelParser(node, query) {\n const hasString = node.getChild(String);\n\n if (hasString) {\n const parserNode = node.getChild(Regexp) || node.getChild(Pattern);\n const stringNode = node.getChild(String);\n\n const parser = parserNode && query.substring(parserNode.from, parserNode.to);\n const string = stringNode && query.substring(stringNode.from, stringNode.to);\n\n return `| ${parser}${string}`;\n }\n\n const labelParser = query.substring(node.from, node.to);\n return `| ${labelParser}`;\n}\n\nfunction formatJsonExpressionParser(node, query) {\n const jsonExpressionNodes = iterateNode(node, [LabelExtractionExpression]);\n let response = '';\n\n jsonExpressionNodes.forEach((node) => {\n const identifierNode = node.getChild(Identifier);\n const valueNode = node.getChild(String);\n\n const identifier = identifierNode && query.substring(identifierNode.from, identifierNode.to);\n const value = valueNode && query.substring(valueNode.from, valueNode.to);\n\n response += `${identifier}=${value}, `;\n });\n\n return `| json ${trimEnd(response, ', ')}`;\n}\n\nfunction formatLabelFilter(node, query) {\n const selectedFilter =\n node.getChild(Matcher) ||\n node.getChild(IpLabelFilter) ||\n 
node.getChild(NumberFilter) ||\n node.getChild(UnitFilter)?.getChild(DurationFilter) ||\n node.getChild(UnitFilter)?.getChild(BytesFilter);\n\n if (!selectedFilter) {\n return '';\n }\n\n const selectedFilterType = selectedFilter.type.id;\n\n const identifierNode = selectedFilter.getChild(Identifier);\n const operatorNode = identifierNode && identifierNode.nextSibling;\n let valueNode;\n\n if (selectedFilterType === DurationFilter) {\n valueNode = selectedFilter.getChild(Duration);\n } else if (selectedFilterType === BytesFilter) {\n valueNode = selectedFilter.getChild(Bytes);\n } else if (selectedFilterType === NumberFilter) {\n valueNode = selectedFilter.getChild(Number);\n } else {\n valueNode = selectedFilter.getChild(String);\n }\n\n const identifier = identifierNode && query.substring(identifierNode.from, identifierNode.to);\n const operator = operatorNode && query.substring(operatorNode.from, operatorNode.to);\n const value = valueNode && query.substring(valueNode.from, valueNode.to);\n\n if (selectedFilterType === IpLabelFilter) {\n return `| ${identifier}${operator}ip(${value})`;\n }\n\n return `| ${identifier}${operator}${value}`;\n}\n\nfunction formatLineFormatExpr(node, query) {\n const stringNode = node.getChild(String);\n const string = stringNode && query.substring(stringNode.from, stringNode.to);\n return `| line_format ${string}`;\n}\n\nfunction formatLabelFormatExpr(node, query) {\n const labelFormatMatcherNodes = iterateNode(node, [LabelFormatMatcher]);\n let response = '| label_format ';\n\n labelFormatMatcherNodes.forEach((labelFormatMatcherNode) => {\n let identifierNode;\n let valueNode;\n\n if (labelFormatMatcherNode.getChildren(Identifier).length === 2) {\n [identifierNode, valueNode] = labelFormatMatcherNode.getChildren(Identifier);\n } else {\n identifierNode = labelFormatMatcherNode.getChild(Identifier);\n valueNode = labelFormatMatcherNode.getChild(String);\n }\n\n const identifier = identifierNode && query.substring(identifierNode.from, identifierNode.to);\n const value = valueNode && query.substring(valueNode.from, valueNode.to);\n\n response += `${identifier}=${value}, `;\n });\n\n return trimEnd(response, ', ');\n}\n\nfunction formatDecolorizeExpr() {\n return `| decolorize`;\n}\n\nconst formatMetricExpr = (node, query) => {\n const { addBrackets, newNode } = needsBrackets(node, MetricExpr);\n node = newNode;\n let formatted = '';\n\n const childNode = node.firstChild;\n switch (childNode && childNode.type.id) {\n case RangeAggregationExpr:\n formatted = formatRangeAggregationExpr(node, query);\n break;\n\n case VectorAggregationExpr:\n formatted = formatVectorAggregationExpr(node, query);\n break;\n\n case BinOpExpr:\n formatted = formatBinOpExpr(node, query);\n break;\n\n case LiteralExpr:\n formatted = formatLiteralExpr(node, query);\n break;\n\n case LabelReplaceExpr:\n formatted = formatLabelReplaceExpr(node, query);\n break;\n\n case VectorExpr:\n formatted = formatVectorExpr(node, query);\n break;\n }\n\n return addBrackets ? 
'(' + formatted + ')' : formatted;\n};\n\nfunction formatRangeAggregationExpr(node, query) {\n let response = '';\n\n iterateNode(node, [RangeOp, Number, LogRangeExpr, Grouping]).forEach((node) => {\n if (node.parent?.type.id !== RangeAggregationExpr) {\n return;\n }\n\n switch (node.type.id) {\n case RangeOp:\n response += `${query.substring(node.from, node.to)}(\\n`;\n break;\n\n case Number:\n response += `${indent(1) + query.substring(node.from, node.to)},\\n`;\n break;\n\n case LogRangeExpr:\n response += formatLogRangeExpr(node, query);\n break;\n\n case Grouping:\n response += formatGrouping(node, query);\n break;\n }\n });\n\n return response;\n}\n\nfunction formatLogRangeExpr(node, query) {\n const nodes = [];\n let selector = '';\n let pipeline = '';\n let range = '';\n let offset = '';\n let unwrap = '';\n\n iterateNode(node, [Selector, Range, OffsetExpr, UnwrapExpr, PipelineExpr]).forEach((node) => {\n if (node.parent?.type.id !== LogRangeExpr) {\n return;\n }\n\n nodes.push(node);\n\n switch (node.type.id) {\n case Selector: {\n let logExpr = query.substring(node.from, node.to);\n selector += formatSelector({ ...node, from: 0, to: logExpr.length }, logExpr);\n break;\n }\n\n case PipelineExpr:\n pipeline += formatPipelineExpr(node, query);\n break;\n\n case Range:\n range += query.substring(node.from, node.to);\n break;\n\n case OffsetExpr: {\n const durationNode = node.getChild(Duration);\n offset += ` offset ${durationNode ? query.substring(durationNode.from, durationNode.to) : ''}`;\n break;\n }\n\n case UnwrapExpr:\n iterateNode(node, [Identifier, ConvOp, LabelFilter]).forEach((node, _, arr) => {\n switch (node.type.id) {\n case Identifier: {\n if (node.parent?.type.id !== UnwrapExpr) {\n return;\n }\n\n const hasConvOp = arr.find((node) => node.type.id === ConvOp);\n\n if (hasConvOp) {\n return;\n }\n\n unwrap += `| unwrap ${query.substring(node.from, node.to)} `;\n return;\n }\n\n case ConvOp: {\n const identifierNode = arr.find((node) => node.type.id === Identifier);\n const identifier = identifierNode ? 
query.substring(identifierNode.from, identifierNode.to) : '';\n unwrap += `| unwrap ${query.substring(node.from, node.to)}(${identifier}) `;\n return;\n }\n\n case LabelFilter:\n unwrap += formatLabelFilter(node, query);\n return;\n }\n });\n break;\n }\n });\n\n let response = '';\n nodes.forEach((node, index, array) => {\n const previousNode = array[index - 1];\n\n if (node.type.id === Selector) {\n response += indent(1) + selector;\n }\n\n if (node.type.id === PipelineExpr) {\n response += indentMultiline(pipeline, 1);\n }\n\n if (node.type.id === Range) {\n response += '\\n' + indent(1) + range;\n }\n\n if (node.type.id === OffsetExpr) {\n response += offset;\n }\n\n if (node.type.id === UnwrapExpr) {\n if (previousNode?.type.id !== OffsetExpr && previousNode?.type.id !== Range) {\n response += '\\n' + indent(1) + unwrap;\n } else {\n response += ' ' + unwrap;\n }\n }\n });\n\n return (response += '\\n)');\n}\n\nfunction formatGrouping(node, query) {\n let response = '';\n\n const labels = iterateNode(node, [Identifier]).map((node) => {\n return query.substring(node.from, node.to);\n });\n\n iterateNode(node, [By, Without]).forEach((node) => {\n if (node.parent?.type.id !== Grouping) {\n return;\n }\n\n switch (node.type.id) {\n case By:\n response = ` by (${labels.join(', ')}) `;\n break;\n\n case Without:\n response = ` without (${labels.join(', ')}) `;\n break;\n }\n });\n\n return response;\n}\n\nfunction formatVectorAggregationExpr(node, query) {\n let response = '';\n\n iterateNode(node, [VectorOp, Number, MetricExpr, Grouping]).forEach((node, _, arr) => {\n if (node.parent?.type.id !== VectorAggregationExpr) {\n return;\n }\n\n switch (node.type.id) {\n case VectorOp:\n response += `${query.substring(node.from, node.to)}`;\n break;\n\n case Number:\n response += `(\\n`;\n response += `${indent(1) + query.substring(node.from, node.to)},\\n`;\n break;\n\n case MetricExpr: {\n const hasNumber = arr.find((node) => node.type.id === Number && node.parent?.type.id === VectorAggregationExpr);\n response += hasNumber ? '' : '(\\n';\n\n const metricExpr = query.substring(node.from, node.to);\n const metricNode = getNodeFromQuery(metricExpr, MetricExpr);\n response += indentMultiline(formatMetricExpr(metricNode, metricExpr), 1);\n response += '\\n)';\n break;\n }\n\n case Grouping:\n response += formatGrouping(node, query);\n break;\n }\n });\n\n return response;\n}\n\nfunction formatBinOpExpr(node, query) {\n let operator;\n\n const [leftExpr, rightExpr] = iterateNode(node, [Expr]).map((node, idx) => {\n if (idx === 0) {\n operator = query.substring(node.nextSibling?.from ?? 0, node.nextSibling?.to);\n }\n\n const expr = query.substring(node.from, node.to);\n let expressionNode;\n\n if (isLogsQuery(expr)) {\n expressionNode = getNodeFromQuery(expr, LogExpr);\n return formatLogExpr(expressionNode, expr);\n } else {\n expressionNode = getNodeFromQuery(expr, MetricExpr);\n return formatMetricExpr(expressionNode, expr);\n }\n });\n\n return leftExpr + '\\n' + operator + '\\n' + rightExpr;\n}\n\nfunction formatLiteralExpr(node, query) {\n node = node.getChild(LiteralExpr) ?? 
node;\n const addNode = node.getChild(Add);\n const subNode = node.getChild(Sub);\n const numberNode = node.getChild(Number);\n\n if (!numberNode) {\n return '';\n }\n\n if (addNode) {\n return `+${query.substring(numberNode.from, numberNode.to)}`;\n }\n\n if (subNode) {\n return `-${query.substring(numberNode.from, numberNode.to)}`;\n }\n\n return query.substring(numberNode.from, numberNode.to);\n}\n\nfunction formatLabelReplaceExpr(node, query) {\n let response = 'label_replace(\\n';\n\n iterateNode(node, [MetricExpr, String]).forEach((node) => {\n if (node.parent?.type.id !== LabelReplaceExpr) {\n return;\n }\n\n if (node.type.id === MetricExpr) {\n const metricExpr = query.substring(node.from, node.to);\n const metricNode = getNodeFromQuery(metricExpr, MetricExpr);\n response += indentMultiline(formatMetricExpr(metricNode, metricExpr), 1) + ',\\n';\n } else {\n response += indent(1) + query.substring(node.from, node.to) + ',\\n';\n }\n });\n\n return trimEnd(response, ',\\n') + '\\n)';\n}\n\nfunction formatVectorExpr(node, query) {\n node = node.getChild(VectorExpr) ?? node;\n const numberNode = node.getChild(Number);\n\n if (!numberNode) {\n return '';\n }\n\n return `vector(${query.substring(numberNode.from, numberNode.to)})`;\n}\n\n/**\n * @experimental This feature is subject to change or removal in future versions.\n */\nconst formatLokiQuery = (query) => {\n const tree = parser.parse(query);\n let formatted = '';\n\n tree.iterate({\n enter: (ref) => {\n const node = ref.node;\n\n if (node.parent?.type.id !== Expr || node.parent?.parent?.type.id === BinOpExpr) {\n return;\n }\n\n switch (node.type.id) {\n case MetricExpr:\n formatted = formatMetricExpr(node, query);\n return false;\n\n case LogExpr:\n formatted = formatLogExpr(node, query);\n return false;\n }\n },\n });\n\n return trimMultiline(formatted);\n};\n\nexport { AbsentOverTime, Add, And, Avg, AvgOverTime, BinOpExpr, BinOpModifier, Bool, Bottomk, By, Bytes, BytesConv, BytesFilter, BytesOverTime, BytesRate, ConvOp, Count, CountOverTime, Decolorize, DecolorizeExpr, Div, Drop, DropLabel, DropLabels, DropLabelsExpr, Duration, DurationConv, DurationFilter, DurationSecondsConv, Eq, Eql, Expr, Filter, FilterOp, FirstOverTime, GroupLeft, GroupRight, Grouping, GroupingLabel, GroupingLabelList, GroupingLabels, Gte, Gtr, Identifier, Ignoring, Ip, IpLabelFilter, Json, JsonExpressionParser, Keep, KeepLabel, KeepLabels, KeepLabelsExpr, LabelExtractionExpression, LabelExtractionExpressionList, LabelFilter, LabelFormat, LabelFormatExpr, LabelFormatMatcher, LabelName, LabelParser, LabelReplace, LabelReplaceExpr, Labels, LabelsFormat, LastOverTime, LineComment, LineFilter, LineFilters, LineFormat, LineFormatExpr, LiteralExpr, LogExpr, LogQL, LogRangeExpr, Logfmt, LogfmtExpressionParser, LogfmtParser, LogfmtParserFlags, Lss, Lte, Matcher, Matchers, Max, MaxOverTime, MetricExpr, Min, MinOverTime, Mod, Mul, Neq, Nre, Number, NumberFilter, Offset, OffsetExpr, On, OnOrIgnoringModifier, Or, OrFilter, ParserFlag, Pattern, Pipe, PipeExact, PipeMatch, PipelineExpr, PipelineStage, Pow, QuantileOverTime, Range, RangeAggregationExpr, RangeOp, Rate, RateCounter, Re, Regexp, Selector, Sort, Sort_Desc, Stddev, StddevOverTime, Stdvar, StdvarOverTime, String, Sub, Sum, SumOverTime, Topk, UnitFilter, Unless, Unpack, Unwrap, UnwrapExpr, Vector, VectorAggregationExpr, VectorExpr, VectorOp, Without, formatLokiQuery, parser 
};\n"],"names":["RawQuery","query","language","className","theme","styles","getStyles","highlighted","queryTypeOptions","RESOLUTION_OPTIONS","value","LokiOptionFields","props","lineLimitValue","resolution","onRunQuery","runOnBlur","onChange","queryType","onChangeQueryLimit","nextQuery","preprocessMaxLines","onQueryTypeChange","instant","range","rest","onMaxLinesChange","e","onReturnKeyDown","onResolutionChange","option","type","maxLines","Field","MonacoQueryFieldLazy","MonacoQueryFieldWrapper","lastRunValueRef","handleRunQuery","handleBlur","LokiQueryField","override","prevProps","languageProvider","ExtraFieldElement","datasource","history","placeholder","validateQuery","interpolatedQuery","queryLines","parser","interpolatedErrors","parseQuery","parseErrors","queryErrors","interpolatedError","queryError","parseError","findErrorBoundary","isErrorBoundary","nodeRef","node","isEmptyString","errorNode","error","startPos","endPos","line","boundary","placeHolderScopedVars","perf","warned","PROCESS","emitWarning","msg","code","fn","AC","AS","_","warnACPolyfill","reason","printACPolyfillWarning","shouldWarn","TYPE","isPosInt","n","getUintArray","max","ZeroArray","size","Stack","#constructing","HeapCls","s","LRUCache","#max","#maxSize","#dispose","#disposeAfter","#fetchMethod","#size","#calculatedSize","#keyMap","#keyList","#valList","#next","#prev","#head","#tail","#free","#disposed","#sizes","#starts","#ttls","#hasDispose","#hasFetchMethod","#hasDisposeAfter","c","p","#isBackgroundFetch","k","index","options","context","#backgroundFetch","#moveToTail","#indexes","#rindexes","#isStale","ttl","ttlResolution","ttlAutopurge","updateAgeOnGet","updateAgeOnHas","allowStale","dispose","disposeAfter","noDisposeOnSet","noUpdateTTL","maxSize","maxEntrySize","sizeCalculation","fetchMethod","noDeleteOnFetchRejection","noDeleteOnStaleGet","allowStaleOnFetchRejection","allowStaleOnFetchAbort","ignoreFetchAbort","UintArray","#initializeSizeTracking","#initializeTTLTracking","key","ttls","starts","#setItemTTL","start","t","#updateItemAge","#statusTTL","status","cachedNow","getNow","age","sizes","#removeItemSize","#requireSize","v","#addItemSize","#evict","_i","_s","_st","_k","_v","i","#isValidIndex","getOptions","thisp","deleted","entry","remain","arr","setOptions","oldVal","oldValue","dt","task","val","free","head","hasOptions","peekOptions","ac","signal","fetchOpts","cb","updateCache","aborted","ignoreAbort","fetchFail","bf","eb","er","allowStaleAborted","noDelete","pcall","res","rej","fmp","b","fetchOptions","forceRefresh","stale","isStale","staleVal","fetching","#connect","pi","ni","NS_IN_MS","LokiLanguageProvider","initialValues","url","params","timeRange","streamSelector","interpolatedMatch","end","cacheKey","data","values","match","labelBasedQuery","labelMatchers","label","labels","param","nanoseconds","labelName","streamParam","rangeParams","paramCacheKey","labelValues","empty","config","series","DEFAULT_MAX_LINES_SAMPLE","hasLogfmt","hasJSON","hasPack","DEFAULT_WEBSOCKET_CONFIG","WEBSOCKETSUBJECT_INVALID_ERROR_OBJECT","WebSocketSubject","_super","urlConfigOrSource","destination","_this","Observable","Subject","ReplaySubject","operator","sock","subMsg","unsubMsg","messageFilter","self","observer","err","subscription","x","_a","WebSocketCtor","protocol","binaryType","socket","Subscription","evt","_socket","openObserver","queue","Subscriber","serializer","closingObserver","closeObserver","deserializer","subscriber","source","webSocket","parse","uuid","validate","stringToBytes","str","bytes","DNS","URL","v35"
,"name","version","hashfunc","generateUUID","namespace","buf","offset","_namespace","f","y","z","ROTL","sha1","K","H","l","N","M","j","W","a","d","T","UUID_NAMESPACE","appendResponseToBufferedData","response","streams","tsField","lineField","idField","usedUids","stream","allLabelsString","ts","createUid","labelsString","refId","id","newCount","LiveStreams","target","retryInterval","CircularDataFrame","map","retryWhen","attempts","mergeMap","retryAttempt","timer","throwError","finalize","IS_LOKI_LOG_CONTEXT_UI_OPEN","LokiContextUi","row","logContextProvider","updateFilter","onClose","origQuery","runContextQuery","contextFilters","setContextFilters","showPreservedFiltersAppliedNotification","setShowPreservedFiltersAppliedNotification","initialized","setInitialized","loading","setLoading","isOpen","setIsOpen","includePipelineOperations","setIncludePipelineOperations","SHOULD_INCLUDE_PIPELINE_OPERATIONS","timerHandle","previousInitialized","previousContextFilters","isInitialState","filter","enabled","nonIndexed","preservedLabels","LOKI_LOG_CONTEXT_PRESERVED_LABELS","useAsync","initContextFilters","realLabels","realLabelsEnabled","parsedLabels","parsedLabelsEnabled","contextFilterToSelectFilter","contextFilter","showNonIndexedLabels","queryExpr","Alert","Tooltip","Button","Collapse","Icon","Spinner","Label","keys","actionMeta","InlineFieldRow","InlineField","RenderUserContentAsHTML","SortDirection","makeIndex","field","dir","fieldValues","nanos","isAsc","valA","valB","nanoA","nanoB","sortDataFrameByTime","frame","fields","timeField","SortedVector","LogContextProvider","cacheFilters","direction","processResults","result","processedFrames","app","lastValueFrom","makeRequest","REF_ID_STARTER_LOG_ROW_CONTEXT","catchError","switchMap","of","expr","hasParser","nonIndexedLabels","parsedLabel","currentExpr","newExpr","origExpr","allNodePositions","pipelineStagePositions","position","otherNodePositions","pipelineStagePosition","preservedFiltersApplied","allLabels","preservedLabelsString","arePreservedLabelsUsed","newContextFilters","limit","filters","contextTimeBuffer","queryDirection","FieldCache","tsValue","timestamp","preparedExpression","labelNamesRegex","labelValuesRegex","migrateVariableQuery","rawQuery","queryBase","variableOptions","LokiVariableQueryEditor","setType","setLabel","labelOptions","setLabelOptions","setStream","variableQuery","labelNames","newType","onLabelChange","newLabel","onStreamChange","Select","Input","LokiVariableSupport","scopedVars","request","from","getDerivedFields","dataFrame","derivedFieldConfigs","derivedFieldsGrouped","newFields","fieldFromDerivedFieldConfig","labelFields","intersectingKey","logMatch","dataSourceSrv","dataLinks","acc","derivedFieldConfig","dsSettings","makeTableFrames","instantMetricFrames","framesWithRefId","framesByRefId","frames","makeTableFrame","tableTimeField","tableValueField","allLabelNames","valueField","timeArray","valueArray","text","isMetricFrame","setFrameMeta","meta","oldMeta","newMeta","processStreamFrame","custom","newFrame","derivedFields","processStreamsFrames","queryMap","processMetricInstantFrames","processMetricRangeFrames","groupFrames","streamsFrames","metricInstantFrames","metricRangeFrames","improveError","message","transformBackendResult","queries","dataFrames","LokiAnnotationsQueryEditor","annotation","onAnnotationChange","onChangeQuery","queryWithRefId","EditorRow","EditorField","event","getQueryHints","hints","queryWithParser","parserCount","hasPipelineErrorFiltering","hasLevel","levelLikeLabel","splitTimeRange","startTime"
,"endTime","idealRangeDuration","chunkEndTime","chunkStartTime","step","alignedDuration","alignedStartTime","partitionTimeRange","isLogsQuery","originalTimeRange","stepMs","duration","to","adjustTargetsFromResponseState","targets","targetFrame","updatedMaxLines","runSplitGroupedQueries","requests","responseKey","mergedResponse","totalRequests","partition","longestPartition","shouldStop","subquerySubsciption","runNextRequest","requestN","requestGroup","done","nextRequest","nextRequestN","nextRequestGroup","getNextRequestPointers","group","subRequest","partialResponse","combineResponses","updateLoadingFrame","LOADING_FRAME_NAME","loadingFrame","querySupportsSplitting","runSplitQuery","nonSplittingQueries","normalQueries","logQueries","metricQueries","oneDayMs","rangePartitionedLogQueries","rangePartitionedMetricQueries","chunkRangeMs","resolutionPartition","stepMsPartition","calculateStep","q","tap","intervalMs","interval_regex","newStep","safeStep","getLiveStreamKey","msgUint8","hashBuffer","doLokiChannelStream","ds","maxDelta","maxLength","updateFrame","StreamingDataFrame","defer","convertToWebSocketUrl","backend","DEFAULT_MAX_LINES","LOKI_ENDPOINT","REF_ID_DATA_SAMPLES","REF_ID_STARTER_ANNOTATION","REF_ID_STARTER_LOG_VOLUME","REF_ID_STARTER_LOG_SAMPLE","REF_ID_STARTER_STATS","requestId","hideFromInspector","intervalInfo","LokiDatasource","DataSourceWithBackend","instanceSettings","templateSrv","maxDataPoints","liveTarget","settingsData","normalizedQuery","isQuerySuitable","dropErrorExpression","logsVolumeRequest","logsSampleRequest","fixedRequest","streamQueries","streamRequest","merge","logsQueries","subQueries","baseUrl","adhocFilters","expandedQueries","abstractQueries","existingKeys","abstractQuery","labelMatcher","statsForAll","idx","durations","interpolatedVariableQuery","lokiLogsQuery","variable","lokiRegularEscape","lokiSpecialRegexEscape","lodash","expression","labelType","action","parserPositions","labelFilterPositions","lastPosition","tagKeys","titleFormat","textFormat","annotations","splitKeys","DataFrameView","maybeDuplicatedTags","tags","__auto","__interval","__interval_ms","__range","__range_s","__range_ms","exprWithAdHoc","variables","string","defaults","NodePosition","queryHasFilter","getMatchersWithFilter","removeLabelFromQuery","matchers","matcher","removeLabelFilter","removeSelector","pipelineStage","selector","prefix","suffix","matchVisQuery","tree","labelNode","opNode","valueNode","labelValue","addLabelToQuery","streamSelectorPositions","getStreamSelectorPositions","getParserPositions","getLabelFilterPositions","hasStreamSelectorMatchers","getMatcherInStreamPositions","everyStreamSelectorHasMatcher","streamSelectorPosition","matcherPosition","toLabelFilter","lastPositionsPerExpression","getLastPositionPerExpression","addFilterAsLabelFilter","addFilterToStreamSelector","positions","subExpressions","findLeaves","subPositions","subExpression","findLastPosition","addParserToQuery","lineFilterPositions","getLineFiltersPositions","addParser","addNoPipelineErrorToQuery","addLabelFormatToQuery","labelFormat","logQueryPositions","getLogQueryPositions","addLabelFormat","removeCommentsFromQuery","lineCommentPositions","getLineCommentPositions","newQuery","prev","lineCommentPosition","parserNodeTypes","logPartsPositions","pipeline","unwrap","sorted","vectorSelectorPositions","isLast","labelExists","newLabels","positionsToAddAfter","labelFilter","queryPartPositions","addLineFilter","streamSelectorEnd","current","nodes","getAllPositionsInNodeByType","pos","child","getHighlighterExp
ressionsFromQuery","input","results","getNodesFromQuery","pipeExact","pipeMatch","strings","getStringsFromLineFilter","filterTerm","backtickedTerm","unwrappedFilterTerm","resultTerm","getNormalizedLokiQuery","getLokiQueryType","tagsToObscure","partsToKeep","obfuscate","obfuscatedQuery","queryPart","parseToNodeNamesArray","queryParts","isQueryWithNode","nodeType","nodeTypes","getNodePositionsFromQuery","getNodeFromQuery","isQueryWithError","isQueryWithParser","getParserFromQuery","parsers","isQueryPipelineErrorFiltering","isQueryWithLabelFormat","getLogQueryFromMetricsQuery","selectorNode","pipelineExprNode","pipelineExpr","getLogQueryFromMetricsQueryAtPosition","metricQuery","isQueryWithLabelFilter","isQueryWithLineFilter","isQueryWithRangeVariable","rangeNodes","getStreamSelectorsFromQuery","requestSupportsSplitting","allQueries","isLokiQuery","getLokiQueryFromDataQuery","formatLogqlQuery","isInvalid","transformedQuery","transformationMatches","pattern","formatted","QueryModellerBase","operationDefinitions","innerQueryPlaceholder","Registry","categories","category","op","def","LokiQueryModeller","queryString","operations","operation","binaryQueries","binQuery","leftOperand","binaryQuery","nested","lokiQueryModeller","binaryScalarDefs","binaryScalarOperations","opDef","defaultParams","getSimpleBinaryRenderer","model","innerExpr","bool","UnwrapParamEditor","operationId","queryModeller","state","setState","loadUnwrapOptions","logExpr","samples","getOperationDefinitions","aggregations","opId","aggregationsWithParam","rangeOperations","rangeOperationsWithGrouping","explainOperator","getDefinitionById","checkParamsAreValid","buildVisualQueryFromString","replacedExpr","handleExpression","isEmptyQuery","visQuery","getLabel","getLineFilter","createNotSupportedError","getLabelParser","getLabelFilter","getJsonExpressionParser","getLogfmtParser","getLineFormat","getLabelFormat","handleUnwrapExpr","getDecolorize","handleRangeAggregation","handleVectorAggregation","handleBinary","isIntervalVariableError","handleDropFilter","handleKeepFilter","filterExpr","handleQuotes","orFilter","parserNode","flags","subNode","ipLabelFilter","valueString","renameTo","originalLabel","unwrapExprChild","labelFilterChild","unwrapChild","convOp","identifier","nameNode","funcName","number","rangeValue","grouping","numberNode","metricExpr","operatorToOpName","left","binModifier","getBinaryModifier","right","leftNumber","getLastChildWithSelector","rightNumber","rightBinary","leftMostChild","boolMatcher","children","exploringNode","ErrorId","getLeftMostChild","cur","makeError","getString","variableRegex","replaceVariables","var1","var2","fmt2","var3","fieldPath","fmt3","fmt","varType","varTypeFunc","returnVariables","makeBinOp","hasBool","getAllByType","regexifyLabelValuesQueryString","isLogLineJSON","parsed","LOGFMT_REGEXP","isLogLineLogfmt","isLogLinePacked","dataFrameHasLokiError","dataFrameHasLevelLabel","extractLogParserFromDataFrame","logLines","extractLabelKeysFromDataFrame","labelsArray","labelTypeArray","labelKeys","labelsSet","labelsType","allLabelKeys","extractUnwrapLabelKeysFromDataFrame","obj","extractHasErrorLabelFromDataFrame","labelField","extractLevelLikeLabelFromDataFrame","onDashboardLoadedHandler","dashboardId","orgId","grafanaVersion","lokiQueries","instantQueries","rangeQueries","builderModeQueries","codeModeQueries","queriesWithTemplateVariables","isQueryWithTemplateVariables","queriesWithChangedResolution","isQueryWithChangedResolution","queriesWithChangedLineLimit","isQueryWithChangedLineLimit","queries
WithChangedLegend","isQueryWithChangedLegend","shouldNotReportBasedOnRefId","starters","starter","calculateTotalBytes","totalBytes","byteKey","stat","trackQuery","trackingSettings","extraPayload","trackGroupedQueries","groupedRequests","originalRequest","splittingPayload","total","split_query_partition_size","Json$1","Logfmt$1","Unpack$1","Pattern$1","Regexp$1","Unwrap$1","LabelFormat$1","LineFormat$1","LabelReplace$1","Vector$1","Offset$1","Bool$1","On$1","Ignoring$1","GroupLeft$1","GroupRight$1","Decolorize$1","Drop$1","Keep$1","By$1","Without$1","And$1","Or$1","Unless$1","Sum$1","Avg$1","Count$1","Max$1","Min$1","Stddev$1","Stdvar$1","Bottomk$1","Topk$1","Sort$1","Sort_Desc$1","MetricExpr$1","keywordTokens","specializeIdentifier","contextualKeywordTokens","extendIdentifier","spec_Identifier","stack","Json","Logfmt","Unpack","Pattern","Regexp","Unwrap","LabelFormat","LineFormat","LabelReplace","Vector","Offset","Bool","On","Ignoring","GroupLeft","GroupRight","Decolorize","Drop","Keep","By","Without","And","Or","Unless","Sum","Avg","Count","Max","Min","Stddev","Stdvar","Bottomk","Topk","Sort","Sort_Desc","LineComment","LogQL","Expr","LogExpr","Selector","Matchers","Matcher","Identifier","Eq","String","Neq","Re","Nre","PipelineExpr","PipelineStage","LineFilters","LineFilter","Filter","PipeExact","PipeMatch","FilterOp","Ip","OrFilter","Pipe","LogfmtParser","LogfmtParserFlags","ParserFlag","LabelParser","JsonExpressionParser","LabelExtractionExpressionList","LabelExtractionExpression","LogfmtExpressionParser","LabelFilter","IpLabelFilter","UnitFilter","DurationFilter","Gtr","Duration","Gte","Lss","Lte","Eql","BytesFilter","Bytes","NumberFilter","Number","LineFormatExpr","LabelFormatExpr","LabelsFormat","LabelFormatMatcher","DecolorizeExpr","DropLabelsExpr","DropLabels","DropLabel","KeepLabelsExpr","KeepLabels","KeepLabel","MetricExpr","RangeAggregationExpr","RangeOp","CountOverTime","Rate","RateCounter","BytesOverTime","BytesRate","AvgOverTime","SumOverTime","MinOverTime","MaxOverTime","StddevOverTime","StdvarOverTime","QuantileOverTime","FirstOverTime","LastOverTime","AbsentOverTime","LogRangeExpr","Range","OffsetExpr","UnwrapExpr","ConvOp","BytesConv","DurationConv","DurationSecondsConv","Grouping","Labels","VectorAggregationExpr","VectorOp","BinOpExpr","BinOpModifier","OnOrIgnoringModifier","GroupingLabels","GroupingLabelList","GroupingLabel","LabelName","Add","Sub","Mul","Div","Mod","Pow","LiteralExpr","LabelReplaceExpr","VectorExpr","indent","level","indentMultiline","block","trimMultiline","needsBrackets","childNodeIsSame","addBrackets","iterateNode","lookingFor","buildResponse","pipelineType","lastPipelineType","formattedNode","trimEnd","charactersToTrim","endIndex","formatLogExpr","newNode","ref","formatSelector","formatPipelineExpr","subtree","labelNodes","labelNodeA","labelNodeB","labelValueA","labelValueB","operatorNode","pipelineExprNodes","formatLineFilter","formatLabelParser","formatJsonExpressionParser","formatLabelFilter","formatLineFormatExpr","formatLabelFormatExpr","formatDecolorizeExpr","filterNode","filterOperationNode","stringNode","jsonExpressionNodes","identifierNode","selectedFilter","selectedFilterType","labelFormatMatcherNodes","labelFormatMatcherNode","formatMetricExpr","childNode","formatRangeAggregationExpr","formatVectorAggregationExpr","formatBinOpExpr","formatLiteralExpr","formatLabelReplaceExpr","formatVectorExpr","formatLogRangeExpr","formatGrouping","durationNode","array","previousNode","hasNumber","metricNode","leftExpr","rightExpr","expressionNode","addNo
de","formatLokiQuery"],"sourceRoot":""}