
Commit

noImplicitAny: Azure Monitor (#17966)
Tobias Skarhed authored and torkelo committed Jul 6, 2019
1 parent 83366b9 commit baed5d7
Showing 17 changed files with 138 additions and 122 deletions.
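
Every file in this commit follows the same pattern: parameters that previously fell back to an implicit `any` under the noImplicitAny compiler flag now carry an explicit annotation, with an explicit `any` where a precise type is not yet known. A minimal sketch of the pattern; the class below is an illustrative stand-in, not code from this diff:

// With "noImplicitAny": true in tsconfig.json, an unannotated parameter is a
// compile error (TS7006: Parameter '...' implicitly has an 'any' type)
// instead of silently becoming `any`.
export class ExampleQueryBuilder {
  aggregation = '';
  timeGrain: any = null;

  // Before this kind of change: setAggregation(aggregation) { ... } -- rejected under the flag.
  setAggregation(aggregation: string) {
    this.aggregation = aggregation;
  }

  // An explicit `any` also satisfies the flag; it keeps the loosening visible
  // until a more precise type is introduced.
  setTimeGrain(timeGrain: any) {
    this.timeGrain = timeGrain;
  }
}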
@@ -8,17 +8,17 @@ export default class AppInsightsQuerystringBuilder {
timeGrainUnit = '';
filter = '';

constructor(private from, private to, public grafanaInterval) {}
constructor(private from: any, private to: any, public grafanaInterval: any) {}

setAggregation(aggregation) {
setAggregation(aggregation: string) {
this.aggregation = aggregation;
}

setGroupBy(groupBy) {
setGroupBy(groupBy: string) {
this.groupBy = groupBy;
}

setInterval(timeGrainType, timeGrain, timeGrainUnit) {
setInterval(timeGrainType: string, timeGrain: any, timeGrainUnit: string) {
this.timeGrainType = timeGrainType;
this.timeGrain = timeGrain;
this.timeGrainUnit = timeGrainUnit;
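
A usage sketch for the builder above with its newly annotated setters. The import path and every literal argument value are illustrative assumptions, not taken from the diff:

// Path assumed for illustration; the diff does not show the file's location.
import AppInsightsQuerystringBuilder from './app_insights_querystring_builder';

// from/to/grafanaInterval are still typed `any`, so plain strings are accepted here.
const builder = new AppInsightsQuerystringBuilder('now-1h', 'now', '1m');
builder.setAggregation('avg');                 // aggregation: string
builder.setGroupBy('client/city');             // groupBy: string
builder.setInterval('specific', 30, 'minute'); // timeGrainType: string, timeGrain: any, timeGrainUnit: string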
@@ -2,7 +2,7 @@ import _ from 'lodash';
import { dateTime } from '@grafana/ui/src/utils/moment_wrapper';

export default class ResponseParser {
constructor(private results) {}
constructor(private results: any) {}

parseQueryResult() {
let data: any = [];
@@ -27,17 +27,17 @@ export default class ResponseParser {
return data;
}

parseRawQueryResultRow(query: any, columns, rows, xaxis: string, yaxises: string, spliton: string) {
parseRawQueryResultRow(query: any, columns: any, rows: any, xaxis: string, yaxises: string, spliton: string) {
const data: any[] = [];
const columnsForDropdown = _.map(columns, column => ({ text: column.ColumnName, value: column.ColumnName }));

const xaxisColumn = columns.findIndex(column => column.ColumnName === xaxis);
const xaxisColumn = columns.findIndex((column: any) => column.ColumnName === xaxis);
const yaxisesSplit = yaxises.split(',');
const yaxisColumns = {};
const yaxisColumns: any = {};
_.forEach(yaxisesSplit, yaxis => {
yaxisColumns[yaxis] = columns.findIndex(column => column.ColumnName === yaxis);
yaxisColumns[yaxis] = columns.findIndex((column: any) => column.ColumnName === yaxis);
});
const splitonColumn = columns.findIndex(column => column.ColumnName === spliton);
const splitonColumn = columns.findIndex((column: any) => column.ColumnName === spliton);
const convertTimestamp = xaxis === 'timestamp';

_.forEach(rows, row => {
@@ -57,7 +57,7 @@ export default class ResponseParser {
return data;
}

parseQueryResultRow(query: any, value, alias: string) {
parseQueryResultRow(query: any, value: any, alias: string) {
const data: any[] = [];

if (ResponseParser.isSingleValue(value)) {
@@ -108,7 +108,7 @@ export default class ResponseParser {
return data;
}

getTargetName(segment, alias: string) {
getTargetName(segment: { [x: string]: string }, alias: string) {
let metric = '';
let segmentName = '';
let segmentValue = '';
@@ -141,11 +141,11 @@ export default class ResponseParser {
return metric + `{${segmentName}="${segmentValue}"}`;
}

static isSingleValue(value) {
static isSingleValue(value: any) {
return !ResponseParser.hasSegmentsField(value);
}

static findOrCreateBucket(data, target) {
static findOrCreateBucket(data: any[], target: string) {
let dataTarget: any = _.find(data, ['target', target]);
if (!dataTarget) {
dataTarget = { target: target, datapoints: [] };
@@ -155,29 +155,29 @@ export default class ResponseParser {
return dataTarget;
}

static hasSegmentsField(obj) {
static hasSegmentsField(obj: any) {
const keys = _.keys(obj);
return _.indexOf(keys, 'segments') > -1;
}

static getMetricFieldKey(segment) {
static getMetricFieldKey(segment: { [x: string]: any }) {
const keys = _.keys(segment);

return _.filter(_.without(keys, 'start', 'end'), key => {
return _.isObject(segment[key]);
})[0];
}

static getKeyForAggregationField(dataObj): string {
static getKeyForAggregationField(dataObj: any): string {
const keys = _.keys(dataObj);
return _.intersection(keys, ['sum', 'avg', 'min', 'max', 'count', 'unique'])[0];
}

static dateTimeToEpoch(dateTimeValue) {
static dateTimeToEpoch(dateTimeValue: any) {
return dateTime(dateTimeValue).valueOf();
}

static parseMetricNames(result) {
static parseMetricNames(result: { data: { metrics: any } }) {
const keys = _.keys(result.data.metrics);

return ResponseParser.toTextValueList(keys);
@@ -202,7 +202,7 @@ export default class ResponseParser {
}

parseQuerySchema() {
const result = {
const result: any = {
Type: 'AppInsights',
Tables: {},
};
@@ -225,7 +225,7 @@ export default class ResponseParser {
return result;
}

static toTextValueList(values) {
static toTextValueList(values: any) {
const list: any[] = [];
for (let i = 0; i < values.length; i++) {
list.push({
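
Several annotations in this parser use inline index-signature types, for example getTargetName(segment: { [x: string]: string }, ...), which keeps each change local to one signature instead of introducing a named interface. A small self-contained sketch of the same pattern; the segment shape is an illustrative assumption, not the actual Application Insights response schema:

// An index signature admits any string key while still constraining the value
// type -- enough for code that only reads a handful of dynamic fields.
function getSegmentValue(segment: { [key: string]: string }, name: string): string {
  return segment[name] || '';
}

console.log(getSegmentValue({ 'client/city': 'Stockholm' }, 'client/city')); // "Stockholm"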
@@ -1,5 +1,6 @@
import AzureMonitorDatasource from '../datasource';
import FakeSchemaData from './__mocks__/schema';
// @ts-ignore
import Q from 'q';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { KustoSchema } from '../types';
@@ -49,8 +50,8 @@ describe('AzureLogAnalyticsDatasource', () => {
],
};

let workspacesUrl;
let azureLogAnalyticsUrl;
let workspacesUrl: string;
let azureLogAnalyticsUrl: string;

beforeEach(async () => {
ctx.instanceSettings.jsonData.subscriptionId = 'xxx';
@@ -101,7 +102,7 @@ describe('AzureLogAnalyticsDatasource', () => {
});

it('should return error status and a detailed error message', () => {
return ctx.ds.testDatasource().then(results => {
return ctx.ds.testDatasource().then((results: any) => {
expect(results.status).toEqual('error');
expect(results.message).toEqual(
'1. Azure Log Analytics: Bad Request: InvalidApiVersionParameter. An error message. '
@@ -174,7 +175,7 @@ describe('AzureLogAnalyticsDatasource', () => {
});

it('should return a list of datapoints', () => {
return ctx.ds.query(options).then(results => {
return ctx.ds.query(options).then((results: any) => {
expect(results.data.length).toBe(2);
expect(results.data[0].datapoints.length).toBe(2);
expect(results.data[0].target).toEqual('Administrative');
@@ -213,7 +214,7 @@ describe('AzureLogAnalyticsDatasource', () => {
});

it('should throw an exception', () => {
ctx.ds.query(options).catch(err => {
ctx.ds.query(options).catch((err: any) => {
expect(err.message).toContain('The Time Series format requires a time column.');
});
});
@@ -230,7 +231,7 @@ describe('AzureLogAnalyticsDatasource', () => {
});

it('should return a list of columns and rows', () => {
return ctx.ds.query(options).then(results => {
return ctx.ds.query(options).then((results: any) => {
expect(results.data[0].type).toBe('table');
expect(results.data[0].columns.length).toBe(3);
expect(results.data[0].rows.length).toBe(3);
@@ -300,7 +301,7 @@ describe('AzureLogAnalyticsDatasource', () => {
],
};

let queryResults;
let queryResults: any[];

beforeEach(async () => {
ctx.backendSrv.datasourceRequest = (options: { url: string }) => {
@@ -359,7 +360,7 @@ describe('AzureLogAnalyticsDatasource', () => {
],
};

let annotationResults;
let annotationResults: any[];

beforeEach(async () => {
ctx.backendSrv.datasourceRequest = (options: { url: string }) => {
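
One non-mechanical change in the test file above is the // @ts-ignore comment added before import Q from 'q': it suppresses the compile error on that single following line, the usual escape hatch when a dependency ships without usable type declarations. A sketch of the effect, assuming (as the comment below states) that no declarations for 'q' are available; adding @types/q or a local declaration file would be the stricter alternative:

// @ts-ignore -- assumed: no type declarations for 'q' are installed in this setup
import Q from 'q';

// After the suppressed import, Q is typed as `any`, so these calls are unchecked.
const deferred = Q.defer();
deferred.resolve('done');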
@@ -5,6 +5,7 @@ import { AzureMonitorQuery, AzureDataSourceJsonData } from '../types';
import { DataQueryRequest, DataSourceInstanceSettings } from '@grafana/ui/src/types';
import { BackendSrv } from 'app/core/services/backend_srv';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { IQService } from 'angular';

export default class AzureLogAnalyticsDatasource {
id: number;
@@ -20,7 +21,7 @@ export default class AzureLogAnalyticsDatasource {
private instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>,
private backendSrv: BackendSrv,
private templateSrv: TemplateSrv,
private $q
private $q: IQService
) {
this.id = instanceSettings.id;
this.baseUrl = this.instanceSettings.jsonData.azureLogAnalyticsSameAs
@@ -57,7 +58,7 @@ export default class AzureLogAnalyticsDatasource {
const workspaceListUrl =
this.azureMonitorUrl +
`/${subscriptionId}/providers/Microsoft.OperationalInsights/workspaces?api-version=2017-04-26-preview`;
return this.doRequest(workspaceListUrl).then(response => {
return this.doRequest(workspaceListUrl).then((response: any) => {
return (
_.map(response.data.value, val => {
return { text: val.name, value: val.properties.customerId };
@@ -72,7 +73,7 @@
}
const url = `${this.baseUrl}/${workspace}/metadata`;

return this.doRequest(url).then(response => {
return this.doRequest(url).then((response: any) => {
return new ResponseParser(response.data).parseSchemaResult();
});
}
@@ -118,7 +119,7 @@
}

metricFindQuery(query: string) {
return this.getDefaultOrFirstWorkspace().then(workspace => {
return this.getDefaultOrFirstWorkspace().then((workspace: any) => {
const queries: any[] = this.buildQuery(query, null, workspace);

const promises = this.doQueries(queries);
@@ -161,7 +162,7 @@ export default class AzureLogAnalyticsDatasource {
return queries;
}

interpolateVariable(value, variable) {
interpolateVariable(value: string, variable: { multi: any; includeAll: any }) {
if (typeof value === 'string') {
if (variable.multi || variable.includeAll) {
return "'" + value + "'";
@@ -189,13 +190,13 @@ export default class AzureLogAnalyticsDatasource {
return Promise.resolve(this.defaultOrFirstWorkspace);
}

return this.getWorkspaces(this.subscriptionId).then(workspaces => {
return this.getWorkspaces(this.subscriptionId).then((workspaces: any[]) => {
this.defaultOrFirstWorkspace = workspaces[0].value;
return this.defaultOrFirstWorkspace;
});
}

annotationQuery(options) {
annotationQuery(options: any) {
if (!options.annotation.rawQuery) {
return this.$q.reject({
message: 'Query missing in annotation definition',
@@ -212,16 +213,16 @@ export default class AzureLogAnalyticsDatasource {
});
}

doQueries(queries) {
doQueries(queries: any[]) {
return _.map(queries, query => {
return this.doRequest(query.url)
.then(result => {
.then((result: any) => {
return {
result: result,
query: query,
};
})
.catch(err => {
.catch((err: any) => {
throw {
error: err,
query: query,
@@ -230,13 +231,13 @@ export default class AzureLogAnalyticsDatasource {
});
}

doRequest(url, maxRetries = 1) {
doRequest(url: string, maxRetries = 1) {
return this.backendSrv
.datasourceRequest({
url: this.url + url,
method: 'GET',
})
.catch(error => {
.catch((error: any) => {
if (maxRetries > 0) {
return this.doRequest(url, maxRetries - 1);
}
@@ -252,12 +253,12 @@ export default class AzureLogAnalyticsDatasource {
}

return this.getDefaultOrFirstWorkspace()
.then(ws => {
.then((ws: any) => {
const url = `${this.baseUrl}/${ws}/metadata`;

return this.doRequest(url);
})
.then(response => {
.then((response: any) => {
if (response.status === 200) {
return {
status: 'success',
Expand All @@ -271,7 +272,7 @@ export default class AzureLogAnalyticsDatasource {
message: 'Returned http status code ' + response.status,
};
})
.catch(error => {
.catch((error: any) => {
let message = 'Azure Log Analytics: ';
if (error.config && error.config.url && error.config.url.indexOf('workspacesloganalytics') > -1) {
message = 'Azure Log Analytics requires access to Azure Monitor but had the following error: ';
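
The datasource above now types its injected AngularJS promise service as IQService from the 'angular' typings instead of leaving $q implicitly `any`. A minimal stand-alone sketch of the same injection pattern; the class is an illustrative stand-in, not Grafana code:

import { IQService } from 'angular';

class ExampleService {
  constructor(private $q: IQService) {}

  // With the interface in place, misuse of $q (a wrong method name or argument
  // shape) becomes a compile-time error instead of a runtime one.
  rejectWithMessage(message: string) {
    return this.$q.reject({ message });
  }
}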
@@ -13,7 +13,7 @@ import { TimeSeries, AnnotationEvent } from '@grafana/ui/src/types';

export default class ResponseParser {
columns: string[];
constructor(private results) {}
constructor(private results: any) {}

parseQueryResult(): any {
let data: any[] = [];
@@ -35,7 +35,7 @@ export default class ResponseParser {
return data;
}

parseTimeSeriesResult(query, columns, rows): TimeSeries[] {
parseTimeSeriesResult(query: { refId: string; query: any }, columns: any[], rows: any): TimeSeries[] {
const data: TimeSeries[] = [];
let timeIndex = -1;
let metricIndex = -1;
@@ -73,7 +73,7 @@ export default class ResponseParser {
return data;
}

parseTableResult(query, columns, rows): AzureLogsTableData {
parseTableResult(query: { refId: string; query: string }, columns: any[], rows: any[]): AzureLogsTableData {
const tableResult: AzureLogsTableData = {
type: 'table',
columns: _.map(columns, col => {
@@ -206,7 +206,7 @@ export default class ResponseParser {
return functions;
}

static findOrCreateBucket(data, target): TimeSeries {
static findOrCreateBucket(data: TimeSeries[], target: any): TimeSeries {
let dataTarget: any = _.find(data, ['target', target]);
if (!dataTarget) {
dataTarget = { target: target, datapoints: [], refId: '', query: '' };
@@ -216,7 +216,7 @@ export default class ResponseParser {
return dataTarget;
}

static dateTimeToEpoch(dateTimeValue) {
static dateTimeToEpoch(dateTimeValue: any) {
return dateTime(dateTimeValue).valueOf();
}
}
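
A quick usage sketch for the now-typed static helper dateTimeToEpoch. It assumes ResponseParser is imported from this module (the file path is not shown in the diff) and that the argument is an ISO-8601 timestamp, which the dateTime() wrapper accepts:

// Converts a timestamp to epoch milliseconds for use in datapoint arrays.
const epochMs = ResponseParser.dateTimeToEpoch('2019-07-06T12:00:00Z');
console.log(epochMs); // 1562414400000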