@grafana/data#FieldCache TypeScript Examples

The following examples show how to use @grafana/data#FieldCache. To view the original project or source file, follow the link above each example.
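Before diving in, here is a minimal sketch of the FieldCache API itself. The frame contents are hypothetical sample data, not taken from any example below:

import { FieldCache, FieldType, toDataFrame } from '@grafana/data';

// Build a small DataFrame to index.
const frame = toDataFrame({
  fields: [
    { name: 'time', type: FieldType.time, values: [100, 200, 300] },
    { name: 'value', type: FieldType.number, values: [1, 2, 3] },
  ],
});

// FieldCache indexes the frame's fields by name and by type.
const cache = new FieldCache(frame);

// Lookups return undefined when nothing matches, so guard before dereferencing.
const valueField = cache.getFieldByName('value');
const timeField = cache.getFirstFieldOfType(FieldType.time);
if (valueField && timeField) {
  console.log(timeField.values.get(0), valueField.values.get(0)); // 100 1
}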
Example #1
Source File: logs_model.ts    From grafana-chinese with Apache License 2.0
function getIdField(fieldCache: FieldCache): FieldWithIndex | undefined {
  const idFieldNames = ['id'];
  for (const fieldName of idFieldNames) {
    const idField = fieldCache.getFieldByName(fieldName);
    if (idField) {
      return idField;
    }
  }
  return undefined;
}
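As a quick usage sketch for the helper above (the frame is hypothetical), getIdField returns the id field when one exists and undefined otherwise:

import { FieldCache, FieldType, toDataFrame } from '@grafana/data';

const withId = toDataFrame({
  fields: [
    { name: 'id', type: FieldType.string, values: ['a', 'b'] },
    { name: 'line', type: FieldType.string, values: ['foo', 'bar'] },
  ],
});

const idField = getIdField(new FieldCache(withId)); // FieldWithIndex for 'id'
const missing = getIdField(new FieldCache(toDataFrame({ fields: [] }))); // undefined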
Example #2
Source File: datasource.ts    From grafana-chinese with Apache License 2.0
prepareLogRowContextQueryTarget = (row: LogRowModel, limit: number, direction: 'BACKWARD' | 'FORWARD') => {
    const query = Object.keys(row.labels)
      .map(label => `${label}="${row.labels[label]}"`)
      .join(',');

    const contextTimeBuffer = 2 * 60 * 60 * 1000; // 2h buffer
    const commonTargetOptions = {
      limit,
      query: `{${query}}`,
      expr: `{${query}}`,
      direction,
    };

    const fieldCache = new FieldCache(row.dataFrame);
    const nsField = fieldCache.getFieldByName('tsNs')!;
    const nsTimestamp = nsField.values.get(row.rowIndex);

    if (direction === 'BACKWARD') {
      return {
        ...commonTargetOptions,
        // convert to ns; we lose some precision here but it is not that important at the far points of the context
        start: row.timeEpochMs - contextTimeBuffer + '000000',
        end: nsTimestamp,
        direction,
      };
    } else {
      return {
        ...commonTargetOptions,
        // start param in Loki API is inclusive so we'll have to filter out the row that this request is based on
        // and any others that were logged in the same ns but before the row. Right now these rows will be lost
        // because they are before, yet they come in the response that should return only rows after.
        start: nsTimestamp,
        // convert to ns; we lose some precision here but it is not that important at the far points of the context
        end: row.timeEpochMs + contextTimeBuffer + '000000',
      };
    }
  };
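One detail worth noting above: start and end are built by appending '000000' to a millisecond epoch, converting ms to ns as a string. Nanosecond epochs exceed Number.MAX_SAFE_INTEGER, so doing the arithmetic on JavaScript numbers would round away the low-order digits; string concatenation keeps the value exact. A quick illustration with a made-up timestamp:

const ms = 1573646419522;  // ms epoch, fits safely in a JS number
const ns = ms + '000000';  // '1573646419522000000' — exact ns epoch as a string

// The numeric product rounds to the nearest representable double (53-bit
// mantissa), so its low-order digits are not trustworthy.
const lossy = ms * 1e6;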
Example #3
Source File: utils.test.ts    From grafana-chinese with Apache License 2.0
describe('Graph utils', () => {
  describe('getMultiSeriesGraphHoverInfo', () => {
    describe('when series datapoints are x-axis aligned', () => {
      it('returns the datapoints that the user hovers over', () => {
        const aCache = new FieldCache(aSeries);
        const aValueField = aCache.getFieldByName('value');
        const aTimeField = aCache.getFieldByName('time');
        const bCache = new FieldCache(bSeries);
        const bValueField = bCache.getFieldByName('value');
        const bTimeField = bCache.getFieldByName('time');

        const result = getMultiSeriesGraphHoverInfo([aValueField!, bValueField!], [aTimeField!, bTimeField!], 0);
        expect(result.time).toBe(100);
        expect(result.results[0]).toEqual(
          mockResult('10', 0, 0, getFixedThemedColor(aValueField!), aValueField!.name, 100)
        );
        expect(result.results[1]).toEqual(
          mockResult('30', 0, 1, getFixedThemedColor(bValueField!), bValueField!.name, 100)
        );
      });

      describe('returns the closest datapoints before the hover position', () => {
        it('when hovering right before a datapoint', () => {
          const aCache = new FieldCache(aSeries);
          const aValueField = aCache.getFieldByName('value');
          const aTimeField = aCache.getFieldByName('time');
          const bCache = new FieldCache(bSeries);
          const bValueField = bCache.getFieldByName('value');
          const bTimeField = bCache.getFieldByName('time');

          //  hovering right before middle point
          const result = getMultiSeriesGraphHoverInfo([aValueField!, bValueField!], [aTimeField!, bTimeField!], 199);
          expect(result.time).toBe(100);
          expect(result.results[0]).toEqual(
            mockResult('10', 0, 0, getFixedThemedColor(aValueField!), aValueField!.name, 100)
          );
          expect(result.results[1]).toEqual(
            mockResult('30', 0, 1, getFixedThemedColor(bValueField!), bValueField!.name, 100)
          );
        });

        it('when hovering right after a datapoint', () => {
          const aCache = new FieldCache(aSeries);
          const aValueField = aCache.getFieldByName('value');
          const aTimeField = aCache.getFieldByName('time');
          const bCache = new FieldCache(bSeries);
          const bValueField = bCache.getFieldByName('value');
          const bTimeField = bCache.getFieldByName('time');

          //  hovering right after middle point
          const result = getMultiSeriesGraphHoverInfo([aValueField!, bValueField!], [aTimeField!, bTimeField!], 201);
          expect(result.time).toBe(200);
          expect(result.results[0]).toEqual(
            mockResult('20', 1, 0, getFixedThemedColor(aValueField!), aValueField!.name, 200)
          );
          expect(result.results[1]).toEqual(
            mockResult('60', 1, 1, getFixedThemedColor(bValueField!), bValueField!.name, 200)
          );
        });
      });
    });

    describe('when series x-axes are not aligned', () => {
      // aSeries and cSeries are not aligned
      // cSeries is missing a middle point
      it('hovering over a middle point', () => {
        const aCache = new FieldCache(aSeries);
        const aValueField = aCache.getFieldByName('value');
        const aTimeField = aCache.getFieldByName('time');
        const cCache = new FieldCache(cSeries);
        const cValueField = cCache.getFieldByName('value');
        const cTimeField = cCache.getFieldByName('time');

        // hovering on a middle point
        // aSeries has point at that time, cSeries doesn't
        const result = getMultiSeriesGraphHoverInfo([aValueField!, cValueField!], [aTimeField!, cTimeField!], 200);

        // we expect the time of the hovered point
        expect(result.time).toBe(200);
        // we expect the middle point from aSeries (the one we are hovering over)
        expect(result.results[0]).toEqual(
          mockResult('20', 1, 0, getFixedThemedColor(aValueField!), aValueField!.name, 200)
        );
        // we expect the closest point before the hovered point from cSeries (1st point)
        expect(result.results[1]).toEqual(
          mockResult('30', 0, 1, getFixedThemedColor(cValueField!), cValueField!.name, 100)
        );
      });

      it('hovering right after the middle point', () => {
        const aCache = new FieldCache(aSeries);
        const aValueField = aCache.getFieldByName('value');
        const aTimeField = aCache.getFieldByName('time');
        const cCache = new FieldCache(cSeries);
        const cValueField = cCache.getFieldByName('value');
        const cTimeField = cCache.getFieldByName('time');

        // aSeries has point at that time, cSeries doesn't
        const result = getMultiSeriesGraphHoverInfo([aValueField!, cValueField!], [aTimeField!, cTimeField!], 201);

        // we expect the time of the closest point before hover
        expect(result.time).toBe(200);
        // we expect the closest datapoint before hover from aSeries
        expect(result.results[0]).toEqual(
          mockResult('20', 1, 0, getFixedThemedColor(aValueField!), aValueField!.name, 200)
        );
        // we expect the closest datapoint before hover from cSeries (1st point)
        expect(result.results[1]).toEqual(
          mockResult('30', 0, 1, getFixedThemedColor(cValueField!), cValueField!.name, 100)
        );
      });
    });
  });

  describe('findHoverIndexFromData', () => {
    it('returns index of the closest datapoint before hover position', () => {
      const cache = new FieldCache(aSeries);
      const timeField = cache.getFieldByName('time');
      // hovering over 1st datapoint
      expect(findHoverIndexFromData(timeField!, 0)).toBe(0);
      // hovering right before the 2nd datapoint
      expect(findHoverIndexFromData(timeField!, 199)).toBe(0);
      // hovering over 2nd datapoint
      expect(findHoverIndexFromData(timeField!, 200)).toBe(1);
      // hovering right before the 3rd datapoint
      expect(findHoverIndexFromData(timeField!, 299)).toBe(1);
      // hovering over 3rd datapoint
      expect(findHoverIndexFromData(timeField!, 300)).toBe(2);
    });
  });
});
Example #4
Source File: LogRowContextProvider.tsx    From grafana-chinese with Apache License 2.0
getRowContexts = async (
  getRowContext: (row: LogRowModel, options?: any) => Promise<DataQueryResponse>,
  row: LogRowModel,
  limit: number
) => {
  const promises = [
    getRowContext(row, {
      limit,
    }),
    getRowContext(row, {
      // The start time is inclusive so we will get the one row we are using as context entry
      limit: limit + 1,
      direction: 'FORWARD',
    }),
  ];

  const results: Array<DataQueryResponse | DataQueryError> = await Promise.all(promises.map(p => p.catch(e => e)));

  return {
    data: results.map(result => {
      const dataResult: DataQueryResponse = result as DataQueryResponse;
      if (!dataResult.data) {
        return [];
      }

      const data: any[] = [];
      for (let index = 0; index < dataResult.data.length; index++) {
        const dataFrame = toDataFrame(dataResult.data[index]);
        const fieldCache = new FieldCache(dataFrame);
        const timestampField: Field<string> = fieldCache.getFieldByName('ts')!;
        const idField: Field<string> | undefined = fieldCache.getFieldByName('id');

        for (let fieldIndex = 0; fieldIndex < timestampField.values.length; fieldIndex++) {
          // TODO: this filtering is datasource dependent, so it would make sense to move it there and have the
          //  API return the correct list of lines, handling inclusive ranges or filtering out the correct line
          //  in the datasource.

          // Filter out the row that is the one used as a focal point for the context as we will get it in one of the
          // requests.
          if (idField) {
            // For Loki this means we filter out only the one row. The issue is we could have other rows logged
            // at the same ns which came before, but they come in the response that searches for logs after. This
            // means right now we will show those as if they came after. This is not strictly correct, but it seems
            // better than losing them, and making this correct would mean quite a bit of complexity to shuffle
            // things around without messing up counts.
            if (idField.values.get(fieldIndex) === row.uid) {
              continue;
            }
          } else {
            // Fall back to the timestamp. This should not happen right now as this feature is implemented only
            // for Loki, which has an id. Later this branch could be used by other datasources, but note that this
            // could also filter out logs that share the same timestamp, which can be a problem depending on the
            // precision.
            if (parseInt(timestampField.values.get(fieldIndex), 10) === row.timeEpochMs) {
              continue;
            }
          }

          const lineField: Field<string> = dataFrame.fields.filter(field => field.name === 'line')[0];
          const line = lineField.values.get(fieldIndex); // assuming that both fields have same length

          if (data.length === 0) {
            data[0] = [line];
          } else {
            data[0].push(line);
          }
        }
      }

      return data;
    }),
    errors: results.map(result => {
      const errorResult: DataQueryError = result as DataQueryError;
      if (!errorResult.message) {
        return '';
      }

      return errorResult.message;
    }),
  };
}
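Example #4 leans on a small pattern worth isolating: promises.map(p => p.catch(e => e)) folds every rejection into a resolved value, so Promise.all never short-circuits and each slot in the results array lines up with its input promise. A standalone sketch of that idea (the helper name is mine, not from the source):

// Resolve every promise, converting rejections into plain values in the result.
async function settleAll<T>(promises: Array<Promise<T>>): Promise<Array<T | Error>> {
  return Promise.all(promises.map(p => p.catch((e: Error) => e)));
}

// Usage: successes and failures stay positionally aligned with the inputs,
// which is what lets getRowContexts derive parallel data and errors arrays
// from the same results list.
// const results = await settleAll([getRowContext(row, { limit }), ...]);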
Example #5
Source File: logs_model.ts    From grafana-chinese with Apache License 2.0
/**
 * Converts dataFrames into LogsModel. This involves merging them into one list, sorting them and computing metadata
 * like common labels.
 */
export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefined {
  if (logSeries.length === 0) {
    return undefined;
  }
  const allLabels: Labels[] = [];

  // Find the fields we care about and collect all labels
  const allSeries: LogFields[] = logSeries.map(series => {
    const fieldCache = new FieldCache(series);

    // Assume the first string field in the dataFrame is the message. This has been right so far but probably
    // needs some more explicit checks.
    const stringField = fieldCache.getFirstFieldOfType(FieldType.string);
    if (stringField.labels) {
      allLabels.push(stringField.labels);
    }
    return {
      series,
      timeField: fieldCache.getFirstFieldOfType(FieldType.time),
      stringField,
      logLevelField: fieldCache.getFieldByName('level'),
      idField: getIdField(fieldCache),
    };
  });

  const commonLabels = allLabels.length > 0 ? findCommonLabels(allLabels) : {};

  const rows: LogRowModel[] = [];
  let hasUniqueLabels = false;

  for (const info of allSeries) {
    const { timeField, stringField, logLevelField, idField, series } = info;
    const labels = stringField.labels;
    const uniqueLabels = findUniqueLabels(labels, commonLabels);
    if (Object.keys(uniqueLabels).length > 0) {
      hasUniqueLabels = true;
    }

    let seriesLogLevel: LogLevel | undefined = undefined;
    if (labels && Object.keys(labels).indexOf('level') !== -1) {
      seriesLogLevel = getLogLevelFromKey(labels['level']);
    }

    for (let j = 0; j < series.length; j++) {
      const ts = timeField.values.get(j);
      const time = dateTime(ts);

      const messageValue: unknown = stringField.values.get(j);
      // This should be a string but sometimes isn't (e.g. Elasticsearch) because the dataFrame is not strongly typed.
      const message: string = typeof messageValue === 'string' ? messageValue : JSON.stringify(messageValue);

      const hasAnsi = hasAnsiCodes(message);
      const searchWords = series.meta && series.meta.searchWords ? series.meta.searchWords : [];

      let logLevel = LogLevel.unknown;
      if (logLevelField && logLevelField.values.get(j)) {
        logLevel = getLogLevelFromKey(logLevelField.values.get(j));
      } else if (seriesLogLevel) {
        logLevel = seriesLogLevel;
      } else {
        logLevel = getLogLevel(message);
      }

      rows.push({
        entryFieldIndex: stringField.index,
        rowIndex: j,
        dataFrame: series,
        logLevel,
        timeFromNow: time.fromNow(),
        timeEpochMs: time.valueOf(),
        timeLocal: time.format(logTimeFormat),
        timeUtc: toUtc(time.valueOf()).format(logTimeFormat),
        uniqueLabels,
        hasAnsi,
        searchWords,
        entry: hasAnsi ? ansicolor.strip(message) : message,
        raw: message,
        labels: stringField.labels,
        uid: idField ? idField.values.get(j) : j.toString(),
      });
    }
  }

  const deduplicatedLogRows = deduplicateLogRowsById(rows);

  // Metadata to display in status
  const meta: LogsMetaItem[] = [];
  if (_.size(commonLabels) > 0) {
    meta.push({
      label: 'Common labels',
      value: commonLabels,
      kind: LogsMetaKind.LabelsMap,
    });
  }

  const limits = logSeries.filter(series => series.meta && series.meta.limit);
  const limitValue = Object.values(
    limits.reduce((acc: any, elem: any) => {
      acc[elem.refId] = elem.meta.limit;
      return acc;
    }, {})
  ).reduce((acc: number, elem: any) => (acc += elem), 0);

  if (limits.length > 0) {
    meta.push({
      label: 'Limit',
      value: `${limitValue} (${deduplicatedLogRows.length} returned)`,
      kind: LogsMetaKind.String,
    });
  }

  return {
    hasUniqueLabels,
    meta,
    rows: deduplicatedLogRows,
  };
}
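A non-obvious step above is the limit computation: series are first deduplicated per refId (the reduce into an object keyed by refId) and only then summed, so several frames produced by the same query contribute their limit once. A compact illustration with made-up inputs:

// Two frames from refId 'A' count the 1000 limit once; the total is 1500.
const limits = [
  { refId: 'A', meta: { limit: 1000 } },
  { refId: 'A', meta: { limit: 1000 } },
  { refId: 'B', meta: { limit: 500 } },
];
const limitValue = Object.values(
  limits.reduce((acc: Record<string, number>, s) => {
    acc[s.refId] = s.meta.limit;
    return acc;
  }, {})
).reduce((sum, v) => sum + v, 0);
console.log(limitValue); // 1500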
Example #6
Source File: elastic_response.test.ts    From grafana-chinese with Apache License 2.0
describe('ElasticResponse', () => {
  let targets;
  let response: any;
  let result: any;

  describe('simple query and count', () => {
    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [{ type: 'count', id: '1' }],
          bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '2': {
                buckets: [
                  {
                    doc_count: 10,
                    key: 1000,
                  },
                  {
                    doc_count: 15,
                    key: 2000,
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 1 series', () => {
      expect(result.data.length).toBe(1);
      expect(result.data[0].target).toBe('Count');
      expect(result.data[0].datapoints.length).toBe(2);
      expect(result.data[0].datapoints[0][0]).toBe(10);
      expect(result.data[0].datapoints[0][1]).toBe(1000);
    });
  });

  describe('simple query count & avg aggregation', () => {
    let result: any;

    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [
            { type: 'count', id: '1' },
            { type: 'avg', field: 'value', id: '2' },
          ],
          bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '3': {
                buckets: [
                  {
                    '2': { value: 88 },
                    doc_count: 10,
                    key: 1000,
                  },
                  {
                    '2': { value: 99 },
                    doc_count: 15,
                    key: 2000,
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 2 series', () => {
      expect(result.data.length).toBe(2);
      expect(result.data[0].datapoints.length).toBe(2);
      expect(result.data[0].datapoints[0][0]).toBe(10);
      expect(result.data[0].datapoints[0][1]).toBe(1000);

      expect(result.data[1].target).toBe('Average value');
      expect(result.data[1].datapoints[0][0]).toBe(88);
      expect(result.data[1].datapoints[1][0]).toBe(99);
    });
  });

  describe('single group by query one metric', () => {
    let result: any;

    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [{ type: 'count', id: '1' }],
          bucketAggs: [
            { type: 'terms', field: 'host', id: '2' },
            { type: 'date_histogram', field: '@timestamp', id: '3' },
          ],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '2': {
                buckets: [
                  {
                    '3': {
                      buckets: [
                        { doc_count: 1, key: 1000 },
                        { doc_count: 3, key: 2000 },
                      ],
                    },
                    doc_count: 4,
                    key: 'server1',
                  },
                  {
                    '3': {
                      buckets: [
                        { doc_count: 2, key: 1000 },
                        { doc_count: 8, key: 2000 },
                      ],
                    },
                    doc_count: 10,
                    key: 'server2',
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 2 series', () => {
      expect(result.data.length).toBe(2);
      expect(result.data[0].datapoints.length).toBe(2);
      expect(result.data[0].target).toBe('server1');
      expect(result.data[1].target).toBe('server2');
    });
  });

  describe('single group by query two metrics', () => {
    let result: any;

    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [
            { type: 'count', id: '1' },
            { type: 'avg', field: '@value', id: '4' },
          ],
          bucketAggs: [
            { type: 'terms', field: 'host', id: '2' },
            { type: 'date_histogram', field: '@timestamp', id: '3' },
          ],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '2': {
                buckets: [
                  {
                    '3': {
                      buckets: [
                        { '4': { value: 10 }, doc_count: 1, key: 1000 },
                        { '4': { value: 12 }, doc_count: 3, key: 2000 },
                      ],
                    },
                    doc_count: 4,
                    key: 'server1',
                  },
                  {
                    '3': {
                      buckets: [
                        { '4': { value: 20 }, doc_count: 1, key: 1000 },
                        { '4': { value: 32 }, doc_count: 3, key: 2000 },
                      ],
                    },
                    doc_count: 10,
                    key: 'server2',
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 4 series', () => {
      expect(result.data.length).toBe(4);
      expect(result.data[0].datapoints.length).toBe(2);
      expect(result.data[0].target).toBe('server1 Count');
      expect(result.data[1].target).toBe('server1 Average @value');
      expect(result.data[2].target).toBe('server2 Count');
      expect(result.data[3].target).toBe('server2 Average @value');
    });
  });

  describe('with percentiles', () => {
    let result: any;

    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [{ type: 'percentiles', settings: { percents: [75, 90] }, id: '1' }],
          bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '3' }],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '3': {
                buckets: [
                  {
                    '1': { values: { '75': 3.3, '90': 5.5 } },
                    doc_count: 10,
                    key: 1000,
                  },
                  {
                    '1': { values: { '75': 2.3, '90': 4.5 } },
                    doc_count: 15,
                    key: 2000,
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 2 series', () => {
      expect(result.data.length).toBe(2);
      expect(result.data[0].datapoints.length).toBe(2);
      expect(result.data[0].target).toBe('p75');
      expect(result.data[1].target).toBe('p90');
      expect(result.data[0].datapoints[0][0]).toBe(3.3);
      expect(result.data[0].datapoints[0][1]).toBe(1000);
      expect(result.data[1].datapoints[1][0]).toBe(4.5);
    });
  });

  describe('with extended_stats', () => {
    let result: any;

    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [
            {
              type: 'extended_stats',
              meta: { max: true, std_deviation_bounds_upper: true },
              id: '1',
            },
          ],
          bucketAggs: [
            { type: 'terms', field: 'host', id: '3' },
            { type: 'date_histogram', id: '4' },
          ],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '3': {
                buckets: [
                  {
                    key: 'server1',
                    '4': {
                      buckets: [
                        {
                          '1': {
                            max: 10.2,
                            min: 5.5,
                            std_deviation_bounds: { upper: 3, lower: -2 },
                          },
                          doc_count: 10,
                          key: 1000,
                        },
                      ],
                    },
                  },
                  {
                    key: 'server2',
                    '4': {
                      buckets: [
                        {
                          '1': {
                            max: 10.2,
                            min: 5.5,
                            std_deviation_bounds: { upper: 3, lower: -2 },
                          },
                          doc_count: 10,
                          key: 1000,
                        },
                      ],
                    },
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 4 series', () => {
      expect(result.data.length).toBe(4);
      expect(result.data[0].datapoints.length).toBe(1);
      expect(result.data[0].target).toBe('server1 Max');
      expect(result.data[1].target).toBe('server1 Std Dev Upper');

      expect(result.data[0].datapoints[0][0]).toBe(10.2);
      expect(result.data[1].datapoints[0][0]).toBe(3);
    });
  });

  describe('single group by with alias pattern', () => {
    let result: any;

    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [{ type: 'count', id: '1' }],
          alias: '{{term @host}} {{metric}} and {{not_exist}} {{@host}}',
          bucketAggs: [
            { type: 'terms', field: '@host', id: '2' },
            { type: 'date_histogram', field: '@timestamp', id: '3' },
          ],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '2': {
                buckets: [
                  {
                    '3': {
                      buckets: [
                        { doc_count: 1, key: 1000 },
                        { doc_count: 3, key: 2000 },
                      ],
                    },
                    doc_count: 4,
                    key: 'server1',
                  },
                  {
                    '3': {
                      buckets: [
                        { doc_count: 2, key: 1000 },
                        { doc_count: 8, key: 2000 },
                      ],
                    },
                    doc_count: 10,
                    key: 'server2',
                  },
                  {
                    '3': {
                      buckets: [
                        { doc_count: 2, key: 1000 },
                        { doc_count: 8, key: 2000 },
                      ],
                    },
                    doc_count: 10,
                    key: 0,
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 3 series', () => {
      expect(result.data.length).toBe(3);
      expect(result.data[0].datapoints.length).toBe(2);
      expect(result.data[0].target).toBe('server1 Count and {{not_exist}} server1');
      expect(result.data[1].target).toBe('server2 Count and {{not_exist}} server2');
      expect(result.data[2].target).toBe('0 Count and {{not_exist}} 0');
    });
  });

  describe('histogram response', () => {
    let result: any;

    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [{ type: 'count', id: '1' }],
          bucketAggs: [{ type: 'histogram', field: 'bytes', id: '3' }],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '3': {
                buckets: [
                  { doc_count: 1, key: 1000 },
                  { doc_count: 3, key: 2000 },
                  { doc_count: 2, key: 1000 },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return table with bytes and count', () => {
      expect(result.data[0].rows.length).toBe(3);
      expect(result.data[0].columns).toEqual([{ text: 'bytes', filterable: true }, { text: 'Count' }]);
    });
  });

  describe('with two filters agg', () => {
    let result: any;

    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [{ type: 'count', id: '1' }],
          bucketAggs: [
            {
              id: '2',
              type: 'filters',
              settings: {
                filters: [{ query: '@metric:cpu' }, { query: '@metric:logins.count' }],
              },
            },
            { type: 'date_histogram', field: '@timestamp', id: '3' },
          ],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '2': {
                buckets: {
                  '@metric:cpu': {
                    '3': {
                      buckets: [
                        { doc_count: 1, key: 1000 },
                        { doc_count: 3, key: 2000 },
                      ],
                    },
                  },
                  '@metric:logins.count': {
                    '3': {
                      buckets: [
                        { doc_count: 2, key: 1000 },
                        { doc_count: 8, key: 2000 },
                      ],
                    },
                  },
                },
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 2 series', () => {
      expect(result.data.length).toBe(2);
      expect(result.data[0].datapoints.length).toBe(2);
      expect(result.data[0].target).toBe('@metric:cpu');
      expect(result.data[1].target).toBe('@metric:logins.count');
    });
  });

  describe('with drop first and last aggregation', () => {
    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [{ type: 'avg', id: '1' }, { type: 'count' }],
          bucketAggs: [
            {
              id: '2',
              type: 'date_histogram',
              field: 'host',
              settings: { trimEdges: 1 },
            },
          ],
        },
      ];

      response = {
        responses: [
          {
            aggregations: {
              '2': {
                buckets: [
                  {
                    '1': { value: 1000 },
                    key: 1,
                    doc_count: 369,
                  },
                  {
                    '1': { value: 2000 },
                    key: 2,
                    doc_count: 200,
                  },
                  {
                    '1': { value: 2000 },
                    key: 3,
                    doc_count: 200,
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should remove first and last value', () => {
      expect(result.data.length).toBe(2);
      expect(result.data[0].datapoints.length).toBe(1);
    });
  });

  describe('No group by time', () => {
    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [{ type: 'avg', id: '1' }, { type: 'count' }],
          bucketAggs: [{ id: '2', type: 'terms', field: 'host' }],
        },
      ];

      response = {
        responses: [
          {
            aggregations: {
              '2': {
                buckets: [
                  {
                    '1': { value: 1000 },
                    key: 'server-1',
                    doc_count: 369,
                  },
                  {
                    '1': { value: 2000 },
                    key: 'server-2',
                    doc_count: 200,
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return table', () => {
      expect(result.data.length).toBe(1);
      expect(result.data[0].type).toBe('table');
      expect(result.data[0].rows.length).toBe(2);
      expect(result.data[0].rows[0][0]).toBe('server-1');
      expect(result.data[0].rows[0][1]).toBe(1000);
      expect(result.data[0].rows[0][2]).toBe(369);

      expect(result.data[0].rows[1][0]).toBe('server-2');
      expect(result.data[0].rows[1][1]).toBe(2000);
    });
  });

  describe('No group by time with percentiles', () => {
    let result: any;

    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [{ type: 'percentiles', field: 'value', settings: { percents: [75, 90] }, id: '1' }],
          bucketAggs: [{ type: 'terms', field: 'id', id: '3' }],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '3': {
                buckets: [
                  {
                    '1': { values: { '75': 3.3, '90': 5.5 } },
                    doc_count: 10,
                    key: 'id1',
                  },
                  {
                    '1': { values: { '75': 2.3, '90': 4.5 } },
                    doc_count: 15,
                    key: 'id2',
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return table', () => {
      expect(result.data.length).toBe(1);
      expect(result.data[0].type).toBe('table');
      expect(result.data[0].columns[0].text).toBe('id');
      expect(result.data[0].columns[1].text).toBe('p75 value');
      expect(result.data[0].columns[2].text).toBe('p90 value');
      expect(result.data[0].rows.length).toBe(2);
      expect(result.data[0].rows[0][0]).toBe('id1');
      expect(result.data[0].rows[0][1]).toBe(3.3);
      expect(result.data[0].rows[0][2]).toBe(5.5);
      expect(result.data[0].rows[1][0]).toBe('id2');
      expect(result.data[0].rows[1][1]).toBe(2.3);
      expect(result.data[0].rows[1][2]).toBe(4.5);
    });
  });

  describe('Multiple metrics of same type', () => {
    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [
            { type: 'avg', id: '1', field: 'test' },
            { type: 'avg', id: '2', field: 'test2' },
          ],
          bucketAggs: [{ id: '2', type: 'terms', field: 'host' }],
        },
      ];

      response = {
        responses: [
          {
            aggregations: {
              '2': {
                buckets: [
                  {
                    '1': { value: 1000 },
                    '2': { value: 3000 },
                    key: 'server-1',
                    doc_count: 369,
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should include field in metric name', () => {
      expect(result.data[0].type).toBe('table');
      expect(result.data[0].rows[0][1]).toBe(1000);
      expect(result.data[0].rows[0][2]).toBe(3000);
    });
  });

  describe('Raw documents query', () => {
    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [{ type: 'raw_document', id: '1' }],
          bucketAggs: [],
        },
      ];
      response = {
        responses: [
          {
            hits: {
              total: 100,
              hits: [
                {
                  _id: '1',
                  _type: 'type',
                  _index: 'index',
                  _source: { sourceProp: 'asd' },
                  fields: { fieldProp: 'field' },
                },
                {
                  _source: { sourceProp: 'asd2' },
                  fields: { fieldProp: 'field2' },
                },
              ],
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return docs', () => {
      expect(result.data.length).toBe(1);
      expect(result.data[0].type).toBe('docs');
      expect(result.data[0].total).toBe(100);
      expect(result.data[0].datapoints.length).toBe(2);
      expect(result.data[0].datapoints[0].sourceProp).toBe('asd');
      expect(result.data[0].datapoints[0].fieldProp).toBe('field');
    });
  });

  describe('with bucket_script ', () => {
    let result: any;

    beforeEach(() => {
      targets = [
        {
          refId: 'A',
          metrics: [
            { id: '1', type: 'sum', field: '@value' },
            { id: '3', type: 'max', field: '@value' },
            {
              id: '4',
              field: 'select field',
              pipelineVariables: [
                { name: 'var1', pipelineAgg: '1' },
                { name: 'var2', pipelineAgg: '3' },
              ],
              settings: { script: 'params.var1 * params.var2' },
              type: 'bucket_script',
            },
          ],
          bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '2' }],
        },
      ];
      response = {
        responses: [
          {
            aggregations: {
              '2': {
                buckets: [
                  {
                    1: { value: 2 },
                    3: { value: 3 },
                    4: { value: 6 },
                    doc_count: 60,
                    key: 1000,
                  },
                  {
                    1: { value: 3 },
                    3: { value: 4 },
                    4: { value: 12 },
                    doc_count: 60,
                    key: 2000,
                  },
                ],
              },
            },
          },
        ],
      };

      result = new ElasticResponse(targets, response).getTimeSeries();
    });

    it('should return 3 series', () => {
      expect(result.data.length).toBe(3);
      expect(result.data[0].datapoints.length).toBe(2);
      expect(result.data[0].target).toBe('Sum @value');
      expect(result.data[1].target).toBe('Max @value');
      expect(result.data[2].target).toBe('Sum @value * Max @value');
      expect(result.data[0].datapoints[0][0]).toBe(2);
      expect(result.data[1].datapoints[0][0]).toBe(3);
      expect(result.data[2].datapoints[0][0]).toBe(6);
      expect(result.data[0].datapoints[1][0]).toBe(3);
      expect(result.data[1].datapoints[1][0]).toBe(4);
      expect(result.data[2].datapoints[1][0]).toBe(12);
    });
  });

  describe('simple logs query and count', () => {
    const targets: any = [
      {
        refId: 'A',
        metrics: [{ type: 'count', id: '1' }],
        bucketAggs: [{ type: 'date_histogram', settings: { interval: 'auto' }, id: '2' }],
        context: 'explore',
        interval: '10s',
        isLogsQuery: true,
        key: 'Q-1561369883389-0.7611823271062786-0',
        liveStreaming: false,
        maxDataPoints: 1620,
        query: '',
        timeField: '@timestamp',
      },
    ];
    const response = {
      responses: [
        {
          aggregations: {
            '2': {
              buckets: [
                {
                  doc_count: 10,
                  key: 1000,
                },
                {
                  doc_count: 15,
                  key: 2000,
                },
              ],
            },
          },
          hits: {
            hits: [
              {
                _id: 'fdsfs',
                _type: '_doc',
                _index: 'mock-index',
                _source: {
                  '@timestamp': '2019-06-24T09:51:19.765Z',
                  host: 'djisaodjsoad',
                  message: 'hello, i am a message',
                  level: 'debug',
                  fields: {
                    lvl: 'debug',
                  },
                },
              },
              {
                _id: 'kdospaidopa',
                _type: '_doc',
                _index: 'mock-index',
                _source: {
                  '@timestamp': '2019-06-24T09:52:19.765Z',
                  host: 'dsalkdakdop',
                  message: 'hello, i am also message',
                  level: 'error',
                  fields: {
                    lvl: 'info',
                  },
                },
              },
            ],
          },
        },
      ],
    };

    it('should return histogram aggregation and documents', () => {
      const result = new ElasticResponse(targets, response).getLogs();
      expect(result.data.length).toBe(2);
      const logResults = result.data[0] as MutableDataFrame;
      const fields = logResults.fields.map(f => {
        return {
          name: f.name,
          type: f.type,
        };
      });

      expect(fields).toContainEqual({ name: '@timestamp', type: 'time' });
      expect(fields).toContainEqual({ name: 'host', type: 'string' });
      expect(fields).toContainEqual({ name: 'message', type: 'string' });

      let rows = new DataFrameView(logResults);
      for (let i = 0; i < rows.length; i++) {
        const r = rows.get(i);
        expect(r._id).toEqual(response.responses[0].hits.hits[i]._id);
        expect(r._type).toEqual(response.responses[0].hits.hits[i]._type);
        expect(r._index).toEqual(response.responses[0].hits.hits[i]._index);
        expect(r._source).toEqual(flatten(response.responses[0].hits.hits[i]._source, null));
      }

      // Make a map from the histogram results
      const hist: KeyValue<number> = {};
      const histogramResults = new MutableDataFrame(result.data[1]);
      rows = new DataFrameView(histogramResults);
      for (let i = 0; i < rows.length; i++) {
        const row = rows.get(i);
        hist[row.Time] = row.Count;
      }

      response.responses[0].aggregations['2'].buckets.forEach((bucket: any) => {
        expect(hist[bucket.key]).toEqual(bucket.doc_count);
      });
    });

    it('should map levels field', () => {
      const result = new ElasticResponse(targets, response).getLogs(undefined, 'level');
      const fieldCache = new FieldCache(result.data[0]);
      const field = fieldCache.getFieldByName('level');
      expect(field.values.toArray()).toEqual(['debug', 'error']);
    });

    it('should remap levels field to new field', () => {
      const result = new ElasticResponse(targets, response).getLogs(undefined, 'fields.lvl');
      const fieldCache = new FieldCache(result.data[0]);
      const field = fieldCache.getFieldByName('level');
      expect(field.values.toArray()).toEqual(['debug', 'info']);
    });
  });
});
Example #7
Source File: datasource.test.ts    From grafana-chinese with Apache License 2.0
describe('LokiDatasource', () => {
  const instanceSettings: any = {
    url: 'myloggingurl',
  };

  const legacyTestResp: { data: LokiLegacyStreamResponse; status: number } = {
    data: {
      streams: [
        {
          entries: [{ ts: '2019-02-01T10:27:37.498180581Z', line: 'hello' }],
          labels: '{}',
        },
      ],
    },
    status: 404, // for simulating legacy endpoint
  };

  const testResp: { data: LokiResponse } = {
    data: {
      data: {
        resultType: LokiResultType.Stream,
        result: [
          {
            stream: {},
            values: [['1573646419522934000', 'hello']],
          },
        ],
      },
      status: 'success',
    },
  };

  beforeEach(() => {
    jest.clearAllMocks();
    datasourceRequestMock.mockImplementation(() => Promise.resolve());
  });

  const templateSrvMock = ({
    getAdhocFilters: (): any[] => [],
    replace: (a: string) => a,
  } as unknown) as TemplateSrv;

  describe('when creating range query', () => {
    let ds: LokiDatasource;
    let adjustIntervalSpy: jest.SpyInstance;
    beforeEach(() => {
      const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
      const customSettings = { ...instanceSettings, jsonData: customData };
      ds = new LokiDatasource(customSettings, templateSrvMock);
      adjustIntervalSpy = jest.spyOn(ds, 'adjustInterval');
    });

    it('should use default intervalMs if one is not provided', () => {
      const target = { expr: '{job="grafana"}', refId: 'B' };
      const raw = { from: 'now', to: 'now-1h' };
      const range = { from: dateTime(), to: dateTime(), raw: raw };
      const options = {
        range,
      };

      const req = ds.createRangeQuery(target, options);
      expect(req.start).toBeDefined();
      expect(req.end).toBeDefined();
      expect(adjustIntervalSpy).toHaveBeenCalledWith(1000, expect.anything());
    });

    it('should use provided intervalMs', () => {
      const target = { expr: '{job="grafana"}', refId: 'B' };
      const raw = { from: 'now', to: 'now-1h' };
      const range = { from: dateTime(), to: dateTime(), raw: raw };
      const options = {
        range,
        intervalMs: 2000,
      };

      const req = ds.createRangeQuery(target, options);
      expect(req.start).toBeDefined();
      expect(req.end).toBeDefined();
      expect(adjustIntervalSpy).toHaveBeenCalledWith(2000, expect.anything());
    });
  });

  describe('when running range query with fallback', () => {
    let ds: LokiDatasource;
    beforeEach(() => {
      const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
      const customSettings = { ...instanceSettings, jsonData: customData };
      ds = new LokiDatasource(customSettings, templateSrvMock);
      datasourceRequestMock.mockImplementation(() => Promise.resolve(legacyTestResp));
    });

    test('should try latest endpoint but fall back to legacy endpoint if it cannot be reached', async () => {
      const options = getQueryOptions<LokiQuery>({
        targets: [{ expr: '{job="grafana"}', refId: 'B' }],
        exploreMode: ExploreMode.Logs,
      });

      ds.runLegacyQuery = jest.fn();
      await ds.runRangeQueryWithFallback(options.targets[0], options).toPromise();
      expect(ds.runLegacyQuery).toBeCalled();
    });
  });

  describe('when querying', () => {
    let ds: LokiDatasource;
    let testLimit: any;

    beforeAll(() => {
      testLimit = makeLimitTest(instanceSettings, datasourceRequestMock, templateSrvMock, legacyTestResp);
    });

    beforeEach(() => {
      const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
      const customSettings = { ...instanceSettings, jsonData: customData };
      ds = new LokiDatasource(customSettings, templateSrvMock);
      datasourceRequestMock.mockImplementation(() => Promise.resolve(testResp));
    });

    test('should run instant query and range query when in metrics mode', async () => {
      const options = getQueryOptions<LokiQuery>({
        targets: [{ expr: 'rate({job="grafana"}[5m])', refId: 'A' }],
        exploreMode: ExploreMode.Metrics,
      });

      ds.runInstantQuery = jest.fn(() => of({ data: [] }));
      ds.runLegacyQuery = jest.fn();
      ds.runRangeQueryWithFallback = jest.fn(() => of({ data: [] }));
      await ds.query(options).toPromise();

      expect(ds.runInstantQuery).toBeCalled();
      expect(ds.runLegacyQuery).not.toBeCalled();
      expect(ds.runRangeQueryWithFallback).toBeCalled();
    });

    test('should just run range query when in logs mode', async () => {
      const options = getQueryOptions<LokiQuery>({
        targets: [{ expr: '{job="grafana"}', refId: 'B' }],
        exploreMode: ExploreMode.Logs,
      });

      ds.runInstantQuery = jest.fn(() => of({ data: [] }));
      ds.runRangeQueryWithFallback = jest.fn(() => of({ data: [] }));
      await ds.query(options).toPromise();

      expect(ds.runInstantQuery).not.toBeCalled();
      expect(ds.runRangeQueryWithFallback).toBeCalled();
    });

    test('should use default max lines when no limit given', () => {
      testLimit({
        expectedLimit: 1000,
      });
    });

    test('should use custom max lines if limit is set', () => {
      testLimit({
        maxLines: 20,
        expectedLimit: 20,
      });
    });

    test('should use custom maxDataPoints if set in request', () => {
      testLimit({
        maxDataPoints: 500,
        expectedLimit: 500,
      });
    });

    test('should use datasource maxLimit if maxDataPoints is higher', () => {
      testLimit({
        maxLines: 20,
        maxDataPoints: 500,
        expectedLimit: 20,
      });
    });

    test('should return series data', async () => {
      const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
      const customSettings = { ...instanceSettings, jsonData: customData };
      const ds = new LokiDatasource(customSettings, templateSrvMock);
      datasourceRequestMock.mockImplementation(
        jest
          .fn()
          .mockReturnValueOnce(Promise.resolve(legacyTestResp))
          .mockReturnValueOnce(Promise.resolve(omit(legacyTestResp, 'status')))
      );

      const options = getQueryOptions<LokiQuery>({
        targets: [{ expr: '{job="grafana"} |= "foo"', refId: 'B' }],
      });

      const res = await ds.query(options).toPromise();

      const dataFrame = res.data[0] as DataFrame;
      const fieldCache = new FieldCache(dataFrame);
      expect(fieldCache.getFieldByName('line').values.get(0)).toBe('hello');
      expect(dataFrame.meta.limit).toBe(20);
      expect(dataFrame.meta.searchWords).toEqual(['foo']);
    });
  });

  describe('When interpolating variables', () => {
    let ds: LokiDatasource;
    let variable: CustomVariable;

    beforeEach(() => {
      const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
      const customSettings = { ...instanceSettings, jsonData: customData };
      ds = new LokiDatasource(customSettings, templateSrvMock);
      variable = new CustomVariable({}, {} as any);
    });

    it('should only escape single quotes', () => {
      expect(ds.interpolateQueryExpr("abc'$^*{}[]+?.()|", variable)).toEqual("abc\\\\'$^*{}[]+?.()|");
    });

    it('should return a number', () => {
      expect(ds.interpolateQueryExpr(1000, variable)).toEqual(1000);
    });

    describe('and variable allows multi-value', () => {
      beforeEach(() => {
        variable.multi = true;
      });

      it('should regex escape values if the value is a string', () => {
        expect(ds.interpolateQueryExpr('looking*glass', variable)).toEqual('looking\\\\*glass');
      });

      it('should return pipe separated values if the value is an array of strings', () => {
        expect(ds.interpolateQueryExpr(['a|bc', 'de|f'], variable)).toEqual('a\\\\|bc|de\\\\|f');
      });
    });

    describe('and variable allows all', () => {
      beforeEach(() => {
        variable.includeAll = true;
      });

      it('should regex escape values if the value is a string', () => {
        expect(ds.interpolateQueryExpr('looking*glass', variable)).toEqual('looking\\\\*glass');
      });

      it('should return pipe separated values if the value is an array of strings', () => {
        expect(ds.interpolateQueryExpr(['a|bc', 'de|f'], variable)).toEqual('a\\\\|bc|de\\\\|f');
      });
    });
  });

  describe('when performing testDataSource', () => {
    let ds: DataSourceApi<any, any>;
    let result: any;

    describe('and call succeeds', () => {
      beforeEach(async () => {
        datasourceRequestMock.mockImplementation(async () => {
          return Promise.resolve({
            status: 200,
            data: {
              values: ['avalue'],
            },
          });
        });
        ds = new LokiDatasource(instanceSettings, {} as TemplateSrv);
        result = await ds.testDatasource();
      });

      it('should return successfully', () => {
        expect(result.status).toBe('success');
      });
    });

    describe('and call fails with 401 error', () => {
      let ds: LokiDatasource;
      beforeEach(() => {
        datasourceRequestMock.mockImplementation(() =>
          Promise.reject({
            statusText: 'Unauthorized',
            status: 401,
            data: {
              message: 'Unauthorized',
            },
          })
        );

        const customData = { ...(instanceSettings.jsonData || {}), maxLines: 20 };
        const customSettings = { ...instanceSettings, jsonData: customData };
        ds = new LokiDatasource(customSettings, templateSrvMock);
      });

      it('should return error status and a detailed error message', async () => {
        const result = await ds.testDatasource();
        expect(result.status).toEqual('error');
        expect(result.message).toBe('Loki: Unauthorized. 401. Unauthorized');
      });
    });

    describe('and call fails with 404 error', () => {
      beforeEach(async () => {
        datasourceRequestMock.mockImplementation(() =>
          Promise.reject({
            statusText: 'Not found',
            status: 404,
            data: '404 page not found',
          })
        );
        ds = new LokiDatasource(instanceSettings, {} as TemplateSrv);
        result = await ds.testDatasource();
      });

      it('should return error status and a detailed error message', () => {
        expect(result.status).toEqual('error');
        expect(result.message).toBe('Loki: Not found. 404. 404 page not found');
      });
    });

    describe('and call fails with 502 error', () => {
      beforeEach(async () => {
        datasourceRequestMock.mockImplementation(() =>
          Promise.reject({
            statusText: 'Bad Gateway',
            status: 502,
            data: '',
          })
        );
        ds = new LokiDatasource(instanceSettings, {} as TemplateSrv);
        result = await ds.testDatasource();
      });

      it('should return error status and a detailed error message', () => {
        expect(result.status).toEqual('error');
        expect(result.message).toBe('Loki: Bad Gateway. 502');
      });
    });
  });

  describe('when creating a range query', () => {
    const ds = new LokiDatasource(instanceSettings, templateSrvMock);
    const query: LokiQuery = { expr: 'foo', refId: 'bar' };

    // Loki v1 API has an issue with float step parameters; this test can be removed when the API is fixed
    it('should produce an integer step parameter', () => {
      const range: TimeRange = {
        from: dateTime(0),
        to: dateTime(1e9 + 1),
        raw: { from: '0', to: '1000000001' },
      };
      // Odd timerange/interval combination that would lead to a float step
      const options: RangeQueryOptions = { range, intervalMs: 2000 };
      expect(Number.isInteger(ds.createRangeQuery(query, options).step)).toBeTruthy();
    });
  });

  describe('annotationQuery', () => {
    it('should transform the loki data to annotation response', async () => {
      const ds = new LokiDatasource(instanceSettings, templateSrvMock);
      datasourceRequestMock.mockImplementation(
        jest
          .fn()
          .mockReturnValueOnce(
            Promise.resolve({
              data: [],
              status: 404,
            })
          )
          .mockReturnValueOnce(
            Promise.resolve({
              data: {
                streams: [
                  {
                    entries: [{ ts: '2019-02-01T10:27:37.498180581Z', line: 'hello' }],
                    labels: '{label="value"}',
                  },
                  {
                    entries: [{ ts: '2019-02-01T12:27:37.498180581Z', line: 'hello 2' }],
                    labels: '{label2="value2"}',
                  },
                ],
              },
            })
          )
      );
      const query = makeAnnotationQueryRequest();

      const res = await ds.annotationQuery(query);
      expect(res.length).toBe(2);
      expect(res[0].text).toBe('hello');
      expect(res[0].tags).toEqual(['value']);

      expect(res[1].text).toBe('hello 2');
      expect(res[1].tags).toEqual(['value2']);
    });
  });

  describe('metricFindQuery', () => {
    const ds = new LokiDatasource(instanceSettings, templateSrvMock);
    const mocks = makeMetadataAndVersionsMocks();

    mocks.forEach((mock, index) => {
      it(`should return label names for Loki v${index}`, async () => {
        ds.getVersion = mock.getVersion;
        ds.metadataRequest = mock.metadataRequest;
        const query = 'label_names()';
        const res = await ds.metricFindQuery(query);
        expect(res[0].text).toEqual('label1');
        expect(res[1].text).toEqual('label2');
        expect(res.length).toBe(2);
      });
    });

    mocks.forEach((mock, index) => {
      it(`should return label values for Loki v${index}`, async () => {
        ds.getVersion = mock.getVersion;
        ds.metadataRequest = mock.metadataRequest;
        const query = 'label_values(label1)';
        const res = await ds.metricFindQuery(query);
        expect(res[0].text).toEqual('value1');
        expect(res[1].text).toEqual('value2');
        expect(res.length).toBe(2);
      });
    });

    mocks.forEach((mock, index) => {
      it(`should return empty array when incorrect query for Loki v${index}`, async () => {
        ds.getVersion = mock.getVersion;
        ds.metadataRequest = mock.metadataRequest;
        const query = 'incorrect_query';
        const res = await ds.metricFindQuery(query);
        expect(res.length).toBe(0);
      });
    });
  });
});