From d83f886519c1e20d3eadfa03a575ed8d0022cdea Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Tue, 3 Apr 2018 16:36:43 +0200 Subject: [PATCH 001/263] migrated jquery.flot.events to ts --- .../plugins/panel/graph/jquery.flot.events.js | 604 ---------------- .../plugins/panel/graph/jquery.flot.events.ts | 663 ++++++++++++++++++ 2 files changed, 663 insertions(+), 604 deletions(-) delete mode 100644 public/app/plugins/panel/graph/jquery.flot.events.js create mode 100644 public/app/plugins/panel/graph/jquery.flot.events.ts diff --git a/public/app/plugins/panel/graph/jquery.flot.events.js b/public/app/plugins/panel/graph/jquery.flot.events.js deleted file mode 100644 index 3ea3ca8f3304..000000000000 --- a/public/app/plugins/panel/graph/jquery.flot.events.js +++ /dev/null @@ -1,604 +0,0 @@ -define([ - 'jquery', - 'lodash', - 'angular', - 'tether-drop', -], -function ($, _, angular, Drop) { - 'use strict'; - - function createAnnotationToolip(element, event, plot) { - var injector = angular.element(document).injector(); - var content = document.createElement('div'); - content.innerHTML = ''; - - injector.invoke(["$compile", "$rootScope", function($compile, $rootScope) { - var eventManager = plot.getOptions().events.manager; - var tmpScope = $rootScope.$new(true); - tmpScope.event = event; - tmpScope.onEdit = function() { - eventManager.editEvent(event); - }; - - $compile(content)(tmpScope); - tmpScope.$digest(); - tmpScope.$destroy(); - - var drop = new Drop({ - target: element[0], - content: content, - position: "bottom center", - classes: 'drop-popover drop-popover--annotation', - openOn: 'hover', - hoverCloseDelay: 200, - tetherOptions: { - constraints: [{to: 'window', pin: true, attachment: "both"}] - } - }); - - drop.open(); - - drop.on('close', function() { - setTimeout(function() { - drop.destroy(); - }); - }); - }]); - } - - var markerElementToAttachTo = null; - - function createEditPopover(element, event, plot) { - var eventManager = 
plot.getOptions().events.manager; - if (eventManager.editorOpen) { - // update marker element to attach to (needed in case of legend on the right - // when there is a double render pass and the initial marker element is removed) - markerElementToAttachTo = element; - return; - } - - // mark as openend - eventManager.editorOpened(); - // set marker element to attache to - markerElementToAttachTo = element; - - // wait for element to be attached and positioned - setTimeout(function() { - - var injector = angular.element(document).injector(); - var content = document.createElement('div'); - content.innerHTML = ''; - - injector.invoke(["$compile", "$rootScope", function($compile, $rootScope) { - var scope = $rootScope.$new(true); - var drop; - - scope.event = event; - scope.panelCtrl = eventManager.panelCtrl; - scope.close = function() { - drop.close(); - }; - - $compile(content)(scope); - scope.$digest(); - - drop = new Drop({ - target: markerElementToAttachTo[0], - content: content, - position: "bottom center", - classes: 'drop-popover drop-popover--form', - openOn: 'click', - tetherOptions: { - constraints: [{to: 'window', pin: true, attachment: "both"}] - } - }); - - drop.open(); - eventManager.editorOpened(); - - drop.on('close', function() { - // need timeout here in order call drop.destroy - setTimeout(function() { - eventManager.editorClosed(); - scope.$destroy(); - drop.destroy(); - }); - }); - }]); - - }, 100); - } - - /* - * jquery.flot.events - * - * description: Flot plugin for adding events/markers to the plot - * version: 0.2.5 - * authors: - * Alexander Wunschik - * Joel Oughton - * Nicolas Joseph - * - * website: https://github.com/mojoaxel/flot-events - * - * released under MIT License and GPLv2+ - */ - - /** - * A class that allows for the drawing an remove of some object - */ - var DrawableEvent = function(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { - var _object = object; - var _drawFunc = drawFunc; - var _clearFunc = 
clearFunc; - var _moveFunc = moveFunc; - var _position = { left: left, top: top }; - var _width = width; - var _height = height; - - this.width = function() { return _width; }; - this.height = function() { return _height; }; - this.position = function() { return _position; }; - this.draw = function() { _drawFunc(_object); }; - this.clear = function() { _clearFunc(_object); }; - this.getObject = function() { return _object; }; - this.moveTo = function(position) { - _position = position; - _moveFunc(_object, _position); - }; - }; - - /** - * Event class that stores options (eventType, min, max, title, description) and the object to draw. - */ - var VisualEvent = function(options, drawableEvent) { - var _parent; - var _options = options; - var _drawableEvent = drawableEvent; - var _hidden = false; - - this.visual = function() { return _drawableEvent; }; - this.getOptions = function() { return _options; }; - this.getParent = function() { return _parent; }; - this.isHidden = function() { return _hidden; }; - this.hide = function() { _hidden = true; }; - this.unhide = function() { _hidden = false; }; - }; - - /** - * A Class that handles the event-markers inside the given plot - */ - var EventMarkers = function(plot) { - var _events = []; - - this._types = []; - this._plot = plot; - this.eventsEnabled = false; - - this.getEvents = function() { - return _events; - }; - - this.setTypes = function(types) { - return this._types = types; - }; - - /** - * create internal objects for the given events - */ - this.setupEvents = function(events) { - var that = this; - var parts = _.partition(events, 'isRegion'); - var regions = parts[0]; - events = parts[1]; - - $.each(events, function(index, event) { - var ve = new VisualEvent(event, that._buildDiv(event)); - _events.push(ve); - }); - - $.each(regions, function (index, event) { - var vre = new VisualEvent(event, that._buildRegDiv(event)); - _events.push(vre); - }); - - _events.sort(function(a, b) { - var ao = a.getOptions(), bo = 
b.getOptions(); - if (ao.min > bo.min) { return 1; } - if (ao.min < bo.min) { return -1; } - return 0; - }); - }; - - /** - * draw the events to the plot - */ - this.drawEvents = function() { - var that = this; - // var o = this._plot.getPlotOffset(); - - $.each(_events, function(index, event) { - // check event is inside the graph range - if (that._insidePlot(event.getOptions().min) && !event.isHidden()) { - event.visual().draw(); - } else { - event.visual().getObject().hide(); - } - }); - }; - - /** - * update the position of the event-markers (e.g. after scrolling or zooming) - */ - this.updateEvents = function() { - var that = this; - var o = this._plot.getPlotOffset(), left, top; - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - - $.each(_events, function(index, event) { - top = o.top + that._plot.height() - event.visual().height(); - left = xaxis.p2c(event.getOptions().min) + o.left - event.visual().width() / 2; - event.visual().moveTo({ top: top, left: left }); - }); - }; - - /** - * remove all events from the plot - */ - this._clearEvents = function() { - $.each(_events, function(index, val) { - val.visual().clear(); - }); - _events = []; - }; - - /** - * create a DOM element for the given event - */ - this._buildDiv = function(event) { - var that = this; - - var container = this._plot.getPlaceholder(); - var o = this._plot.getPlotOffset(); - var axes = this._plot.getAxes(); - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var yaxis, top, left, color, markerSize, markerShow, lineStyle, lineWidth; - var markerTooltip; - - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { yaxis = axes.yaxis; } - if (axes.yaxis2 && axes.yaxis2.used) { yaxis = axes.yaxis2; } - - // map the eventType to a types object - var eventTypeId = event.eventType; - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { - color = '#666'; - } else { - color = 
this._types[eventTypeId].color; - } - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].markerSize) { - markerSize = 8; //default marker size - } else { - markerSize = this._types[eventTypeId].markerSize; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerShow === undefined) { - markerShow = true; - } else { - markerShow = this._types[eventTypeId].markerShow; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { - markerTooltip = true; - } else { - markerTooltip = this._types[eventTypeId].markerTooltip; - } - - if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { - lineStyle = 'dashed'; //default line style - } else { - lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); - } - - if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { - lineWidth = 1; //default line width - } else { - lineWidth = this._types[eventTypeId].lineWidth; - } - - var topOffset = xaxis.options.eventSectionHeight || 0; - topOffset = topOffset / 3; - - top = o.top + this._plot.height() + topOffset; - left = xaxis.p2c(event.min) + o.left; - - var line = $('
').css({ - "position": "absolute", - "opacity": 0.8, - "left": left + 'px', - "top": 8, - "width": lineWidth + "px", - "height": this._plot.height() + topOffset * 0.8, - "border-left-width": lineWidth + "px", - "border-left-style": lineStyle, - "border-left-color": color, - "color": color - }) - .appendTo(container); - - if (markerShow) { - var marker = $('
').css({ - "position": "absolute", - "left": (-markerSize - Math.round(lineWidth / 2)) + "px", - "font-size": 0, - "line-height": 0, - "width": 0, - "height": 0, - "border-left": markerSize+"px solid transparent", - "border-right": markerSize+"px solid transparent" - }); - - marker.appendTo(line); - - if (this._types[eventTypeId] && this._types[eventTypeId].position && this._types[eventTypeId].position.toUpperCase() === 'BOTTOM') { - marker.css({ - "top": top-markerSize-8 +"px", - "border-top": "none", - "border-bottom": markerSize+"px solid " + color - }); - } else { - marker.css({ - "top": "0px", - "border-top": markerSize+"px solid " + color, - "border-bottom": "none" - }); - } - - marker.data({ - "event": event - }); - - var mouseenter = function() { - createAnnotationToolip(marker, $(this).data("event"), that._plot); - }; - - if (event.editModel) { - createEditPopover(marker, event.editModel, that._plot); - } - - var mouseleave = function() { - that._plot.clearSelection(); - }; - - if (markerTooltip) { - marker.css({ "cursor": "help" }); - marker.hover(mouseenter, mouseleave); - } - } - - var drawableEvent = new DrawableEvent( - line, - function drawFunc(obj) { obj.show(); }, - function(obj) { obj.remove(); }, - function(obj, position) { - obj.css({ - top: position.top, - left: position.left - }); - }, - left, - top, - line.width(), - line.height() - ); - - return drawableEvent; - }; - - /** - * create a DOM element for the given region - */ - this._buildRegDiv = function (event) { - var that = this; - - var container = this._plot.getPlaceholder(); - var o = this._plot.getPlotOffset(); - var axes = this._plot.getAxes(); - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var yaxis, top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; - - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { yaxis = axes.yaxis; } - if (axes.yaxis2 && axes.yaxis2.used) { yaxis = axes.yaxis2; } - - // map the eventType to 
a types object - var eventTypeId = event.eventType; - - if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { - color = '#666'; - } else { - color = this._types[eventTypeId].color; - } - - if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { - markerTooltip = true; - } else { - markerTooltip = this._types[eventTypeId].markerTooltip; - } - - if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { - lineWidth = 1; //default line width - } else { - lineWidth = this._types[eventTypeId].lineWidth; - } - - if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { - lineStyle = 'dashed'; //default line style - } else { - lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); - } - - var topOffset = 2; - top = o.top + this._plot.height() + topOffset; - - var timeFrom = Math.min(event.min, event.timeEnd); - var timeTo = Math.max(event.min, event.timeEnd); - left = xaxis.p2c(timeFrom) + o.left; - var right = xaxis.p2c(timeTo) + o.left; - regionWidth = right - left; - - _.each([left, right], function(position) { - var line = $('
').css({ - "position": "absolute", - "opacity": 0.8, - "left": position + 'px', - "top": 8, - "width": lineWidth + "px", - "height": that._plot.height() + topOffset, - "border-left-width": lineWidth + "px", - "border-left-style": lineStyle, - "border-left-color": color, - "color": color - }); - line.appendTo(container); - }); - - var region = $('
').css({ - "position": "absolute", - "opacity": 0.5, - "left": left + 'px', - "top": top, - "width": Math.round(regionWidth + lineWidth) + "px", - "height": "0.5rem", - "border-left-color": color, - "color": color, - "background-color": color - }); - region.appendTo(container); - - region.data({ - "event": event - }); - - var mouseenter = function () { - createAnnotationToolip(region, $(this).data("event"), that._plot); - }; - - if (event.editModel) { - createEditPopover(region, event.editModel, that._plot); - } - - var mouseleave = function () { - that._plot.clearSelection(); - }; - - if (markerTooltip) { - region.css({ "cursor": "help" }); - region.hover(mouseenter, mouseleave); - } - - var drawableEvent = new DrawableEvent( - region, - function drawFunc(obj) { obj.show(); }, - function (obj) { obj.remove(); }, - function (obj, position) { - obj.css({ - top: position.top, - left: position.left - }); - }, - left, - top, - region.width(), - region.height() - ); - - return drawableEvent; - }; - - /** - * check if the event is inside visible range - */ - this._insidePlot = function(x) { - var xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - var xc = xaxis.p2c(x); - return xc > 0 && xc < xaxis.p2c(xaxis.max); - }; - }; - - /** - * initialize the plugin for the given plot - */ - function init(plot) { - /*jshint validthis:true */ - var that = this; - var eventMarkers = new EventMarkers(plot); - - plot.getEvents = function() { - return eventMarkers._events; - }; - - plot.hideEvents = function() { - $.each(eventMarkers._events, function(index, event) { - event.visual().getObject().hide(); - }); - }; - - plot.showEvents = function() { - plot.hideEvents(); - $.each(eventMarkers._events, function(index, event) { - event.hide(); - }); - - that.eventMarkers.drawEvents(); - }; - - // change events on an existing plot - plot.setEvents = function(events) { - if (eventMarkers.eventsEnabled) { - eventMarkers.setupEvents(events); - } - }; - - 
plot.hooks.processOptions.push(function(plot, options) { - // enable the plugin - if (options.events.data != null) { - eventMarkers.eventsEnabled = true; - } - }); - - plot.hooks.draw.push(function(plot) { - var options = plot.getOptions(); - - if (eventMarkers.eventsEnabled) { - // check for first run - if (eventMarkers.getEvents().length < 1) { - eventMarkers.setTypes(options.events.types); - eventMarkers.setupEvents(options.events.data); - } else { - eventMarkers.updateEvents(); - } - } - - eventMarkers.drawEvents(); - }); - } - - var defaultOptions = { - events: { - data: null, - types: null, - xaxis: 1, - position: 'BOTTOM' - } - }; - - $.plot.plugins.push({ - init: init, - options: defaultOptions, - name: "events", - version: "0.2.5" - }); -}); diff --git a/public/app/plugins/panel/graph/jquery.flot.events.ts b/public/app/plugins/panel/graph/jquery.flot.events.ts new file mode 100644 index 000000000000..642883ff75c4 --- /dev/null +++ b/public/app/plugins/panel/graph/jquery.flot.events.ts @@ -0,0 +1,663 @@ +import $ from 'jquery'; +import _ from 'lodash'; +import angular from 'angular'; +import Drop from 'tether-drop'; + +function createAnnotationToolip(element, event, plot) { + let injector = angular.element(document).injector(); + let content = document.createElement('div'); + content.innerHTML = ''; + + injector.invoke([ + '$compile', + '$rootScope', + function($compile, $rootScope) { + let eventManager = plot.getOptions().events.manager; + let tmpScope = $rootScope.$new(true); + tmpScope.event = event; + tmpScope.onEdit = function() { + eventManager.editEvent(event); + }; + + $compile(content)(tmpScope); + tmpScope.$digest(); + tmpScope.$destroy(); + + let drop = new Drop({ + target: element[0], + content: content, + position: 'bottom center', + classes: 'drop-popover drop-popover--annotation', + openOn: 'hover', + hoverCloseDelay: 200, + tetherOptions: { + constraints: [{ to: 'window', pin: true, attachment: 'both' }], + }, + }); + + drop.open(); + + 
drop.on('close', function() { + setTimeout(function() { + drop.destroy(); + }); + }); + }, + ]); +} + +let markerElementToAttachTo = null; + +function createEditPopover(element, event, plot) { + let eventManager = plot.getOptions().events.manager; + if (eventManager.editorOpen) { + // update marker element to attach to (needed in case of legend on the right + // when there is a double render pass and the inital marker element is removed) + markerElementToAttachTo = element; + return; + } + + // mark as openend + eventManager.editorOpened(); + // set marker elment to attache to + markerElementToAttachTo = element; + + // wait for element to be attached and positioned + setTimeout(function() { + let injector = angular.element(document).injector(); + let content = document.createElement('div'); + content.innerHTML = ''; + + injector.invoke([ + '$compile', + '$rootScope', + function($compile, $rootScope) { + let scope = $rootScope.$new(true); + let drop; + + scope.event = event; + scope.panelCtrl = eventManager.panelCtrl; + scope.close = function() { + drop.close(); + }; + + $compile(content)(scope); + scope.$digest(); + + drop = new Drop({ + target: markerElementToAttachTo[0], + content: content, + position: 'bottom center', + classes: 'drop-popover drop-popover--form', + openOn: 'click', + tetherOptions: { + constraints: [{ to: 'window', pin: true, attachment: 'both' }], + }, + }); + + drop.open(); + eventManager.editorOpened(); + + drop.on('close', function() { + // need timeout here in order call drop.destroy + setTimeout(function() { + eventManager.editorClosed(); + scope.$destroy(); + drop.destroy(); + }); + }); + }, + ]); + }, 100); +} + +/* + * jquery.flot.events + * + * description: Flot plugin for adding events/markers to the plot + * version: 0.2.5 + * authors: + * Alexander Wunschik + * Joel Oughton + * Nicolas Joseph + * + * website: https://github.com/mojoaxel/flot-events + * + * released under MIT License and GPLv2+ + */ + +/** + * A class that allows 
for the drawing an remove of some object + */ +let DrawableEvent = function(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { + let _object = object; + let _drawFunc = drawFunc; + let _clearFunc = clearFunc; + let _moveFunc = moveFunc; + let _position = { left: left, top: top }; + let _width = width; + let _height = height; + + this.width = function() { + return _width; + }; + this.height = function() { + return _height; + }; + this.position = function() { + return _position; + }; + this.draw = function() { + _drawFunc(_object); + }; + this.clear = function() { + _clearFunc(_object); + }; + this.getObject = function() { + return _object; + }; + this.moveTo = function(position) { + _position = position; + _moveFunc(_object, _position); + }; +}; + +/** + * Event class that stores options (eventType, min, max, title, description) and the object to draw. + */ +let VisualEvent = function(options, drawableEvent) { + let _parent; + let _options = options; + let _drawableEvent = drawableEvent; + let _hidden = false; + + this.visual = function() { + return _drawableEvent; + }; + this.getOptions = function() { + return _options; + }; + this.getParent = function() { + return _parent; + }; + this.isHidden = function() { + return _hidden; + }; + this.hide = function() { + _hidden = true; + }; + this.unhide = function() { + _hidden = false; + }; +}; + +/** + * A Class that handles the event-markers inside the given plot + */ +let EventMarkers = function(plot) { + let _events = []; + + this._types = []; + this._plot = plot; + this.eventsEnabled = false; + + this.getEvents = function() { + return _events; + }; + + this.setTypes = function(types) { + return (this._types = types); + }; + + /** + * create internal objects for the given events + */ + this.setupEvents = function(events) { + let that = this; + let parts = _.partition(events, 'isRegion'); + let regions = parts[0]; + events = parts[1]; + + $.each(events, function(index, event) { + let ve = new 
VisualEvent(event, that._buildDiv(event)); + _events.push(ve); + }); + + $.each(regions, function(index, event) { + let vre = new VisualEvent(event, that._buildRegDiv(event)); + _events.push(vre); + }); + + _events.sort(function(a, b) { + let ao = a.getOptions(), + bo = b.getOptions(); + if (ao.min > bo.min) { + return 1; + } + if (ao.min < bo.min) { + return -1; + } + return 0; + }); + }; + + /** + * draw the events to the plot + */ + this.drawEvents = function() { + let that = this; + // let o = this._plot.getPlotOffset(); + + $.each(_events, function(index, event) { + // check event is inside the graph range + if (that._insidePlot(event.getOptions().min) && !event.isHidden()) { + event.visual().draw(); + } else { + event + .visual() + .getObject() + .hide(); + } + }); + }; + + /** + * update the position of the event-markers (e.g. after scrolling or zooming) + */ + this.updateEvents = function() { + let that = this; + let o = this._plot.getPlotOffset(), + left, + top; + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + + $.each(_events, function(index, event) { + top = o.top + that._plot.height() - event.visual().height(); + left = xaxis.p2c(event.getOptions().min) + o.left - event.visual().width() / 2; + event.visual().moveTo({ top: top, left: left }); + }); + }; + + /** + * remove all events from the plot + */ + this._clearEvents = function() { + $.each(_events, function(index, val) { + val.visual().clear(); + }); + _events = []; + }; + + /** + * create a DOM element for the given event + */ + this._buildDiv = function(event) { + let that = this; + + let container = this._plot.getPlaceholder(); + let o = this._plot.getPlotOffset(); + let axes = this._plot.getAxes(); + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let yaxis, top, left, color, markerSize, markerShow, lineStyle, lineWidth; + let markerTooltip; + + // determine the y axis used + if (axes.yaxis && axes.yaxis.used) { + yaxis = axes.yaxis; 
+ } + if (axes.yaxis2 && axes.yaxis2.used) { + yaxis = axes.yaxis2; + } + + // map the eventType to a types object + let eventTypeId = event.eventType; + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { + color = '#666'; + } else { + color = this._types[eventTypeId].color; + } + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].markerSize) { + markerSize = 8; //default marker size + } else { + markerSize = this._types[eventTypeId].markerSize; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerShow === undefined) { + markerShow = true; + } else { + markerShow = this._types[eventTypeId].markerShow; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { + markerTooltip = true; + } else { + markerTooltip = this._types[eventTypeId].markerTooltip; + } + + if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { + lineStyle = 'dashed'; //default line style + } else { + lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); + } + + if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { + lineWidth = 1; //default line width + } else { + lineWidth = this._types[eventTypeId].lineWidth; + } + + let topOffset = xaxis.options.eventSectionHeight || 0; + topOffset = topOffset / 3; + + top = o.top + this._plot.height() + topOffset; + left = xaxis.p2c(event.min) + o.left; + + let line = $('
') + .css({ + position: 'absolute', + opacity: 0.8, + left: left + 'px', + top: 8, + width: lineWidth + 'px', + height: this._plot.height() + topOffset * 0.8, + 'border-left-width': lineWidth + 'px', + 'border-left-style': lineStyle, + 'border-left-color': color, + color: color, + }) + .appendTo(container); + + if (markerShow) { + let marker = $('
').css({ + position: 'absolute', + left: -markerSize - Math.round(lineWidth / 2) + 'px', + 'font-size': 0, + 'line-height': 0, + width: 0, + height: 0, + 'border-left': markerSize + 'px solid transparent', + 'border-right': markerSize + 'px solid transparent', + }); + + marker.appendTo(line); + + if ( + this._types[eventTypeId] && + this._types[eventTypeId].position && + this._types[eventTypeId].position.toUpperCase() === 'BOTTOM' + ) { + marker.css({ + top: top - markerSize - 8 + 'px', + 'border-top': 'none', + 'border-bottom': markerSize + 'px solid ' + color, + }); + } else { + marker.css({ + top: '0px', + 'border-top': markerSize + 'px solid ' + color, + 'border-bottom': 'none', + }); + } + + marker.data({ + event: event, + }); + + let mouseenter = function() { + createAnnotationToolip(marker, $(this).data('event'), that._plot); + }; + + if (event.editModel) { + createEditPopover(marker, event.editModel, that._plot); + } + + let mouseleave = function() { + that._plot.clearSelection(); + }; + + if (markerTooltip) { + marker.css({ cursor: 'help' }); + marker.hover(mouseenter, mouseleave); + } + } + + let drawableEvent = new DrawableEvent( + line, + function drawFunc(obj) { + obj.show(); + }, + function(obj) { + obj.remove(); + }, + function(obj, position) { + obj.css({ + top: position.top, + left: position.left, + }); + }, + left, + top, + line.width(), + line.height() + ); + + return drawableEvent; + }; + + /** + * create a DOM element for the given region + */ + this._buildRegDiv = function(event) { + let that = this; + + let container = this._plot.getPlaceholder(); + let o = this._plot.getPlotOffset(); + let axes = this._plot.getAxes(); + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let yaxis, top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; + + // determine the y axis used + if (axes.yaxis && axes.yaxis.used) { + yaxis = axes.yaxis; + } + if (axes.yaxis2 && axes.yaxis2.used) { + yaxis = axes.yaxis2; + } + + 
// map the eventType to a types object + let eventTypeId = event.eventType; + + if (this._types === null || !this._types[eventTypeId] || !this._types[eventTypeId].color) { + color = '#666'; + } else { + color = this._types[eventTypeId].color; + } + + if (this._types === null || !this._types[eventTypeId] || this._types[eventTypeId].markerTooltip === undefined) { + markerTooltip = true; + } else { + markerTooltip = this._types[eventTypeId].markerTooltip; + } + + if (this._types == null || !this._types[eventTypeId] || this._types[eventTypeId].lineWidth === undefined) { + lineWidth = 1; //default line width + } else { + lineWidth = this._types[eventTypeId].lineWidth; + } + + if (this._types == null || !this._types[eventTypeId] || !this._types[eventTypeId].lineStyle) { + lineStyle = 'dashed'; //default line style + } else { + lineStyle = this._types[eventTypeId].lineStyle.toLowerCase(); + } + + let topOffset = 2; + top = o.top + this._plot.height() + topOffset; + + let timeFrom = Math.min(event.min, event.timeEnd); + let timeTo = Math.max(event.min, event.timeEnd); + left = xaxis.p2c(timeFrom) + o.left; + let right = xaxis.p2c(timeTo) + o.left; + regionWidth = right - left; + + _.each([left, right], function(position) { + let line = $('
').css({ + position: 'absolute', + opacity: 0.8, + left: position + 'px', + top: 8, + width: lineWidth + 'px', + height: that._plot.height() + topOffset, + 'border-left-width': lineWidth + 'px', + 'border-left-style': lineStyle, + 'border-left-color': color, + color: color, + }); + line.appendTo(container); + }); + + let region = $('
').css({ + position: 'absolute', + opacity: 0.5, + left: left + 'px', + top: top, + width: Math.round(regionWidth + lineWidth) + 'px', + height: '0.5rem', + 'border-left-color': color, + color: color, + 'background-color': color, + }); + region.appendTo(container); + + region.data({ + event: event, + }); + + let mouseenter = function() { + createAnnotationToolip(region, $(this).data('event'), that._plot); + }; + + if (event.editModel) { + createEditPopover(region, event.editModel, that._plot); + } + + let mouseleave = function() { + that._plot.clearSelection(); + }; + + if (markerTooltip) { + region.css({ cursor: 'help' }); + region.hover(mouseenter, mouseleave); + } + + let drawableEvent = new DrawableEvent( + region, + function drawFunc(obj) { + obj.show(); + }, + function(obj) { + obj.remove(); + }, + function(obj, position) { + obj.css({ + top: position.top, + left: position.left, + }); + }, + left, + top, + region.width(), + region.height() + ); + + return drawableEvent; + }; + + /** + * check if the event is inside visible range + */ + this._insidePlot = function(x) { + let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; + let xc = xaxis.p2c(x); + return xc > 0 && xc < xaxis.p2c(xaxis.max); + }; +}; + +/** + * initialize the plugin for the given plot + */ +function init(plot) { + /*jshint validthis:true */ + let that = this; + let eventMarkers = new EventMarkers(plot); + + plot.getEvents = function() { + return eventMarkers._events; + }; + + plot.hideEvents = function() { + $.each(eventMarkers._events, function(index, event) { + event + .visual() + .getObject() + .hide(); + }); + }; + + plot.showEvents = function() { + plot.hideEvents(); + $.each(eventMarkers._events, function(index, event) { + event.hide(); + }); + + that.eventMarkers.drawEvents(); + }; + + // change events on an existing plot + plot.setEvents = function(events) { + if (eventMarkers.eventsEnabled) { + eventMarkers.setupEvents(events); + } + }; + + 
plot.hooks.processOptions.push(function(plot, options) { + // enable the plugin + if (options.events.data != null) { + eventMarkers.eventsEnabled = true; + } + }); + + plot.hooks.draw.push(function(plot) { + let options = plot.getOptions(); + + if (eventMarkers.eventsEnabled) { + // check for first run + if (eventMarkers.getEvents().length < 1) { + eventMarkers.setTypes(options.events.types); + eventMarkers.setupEvents(options.events.data); + } else { + eventMarkers.updateEvents(); + } + } + + eventMarkers.drawEvents(); + }); +} + +let defaultOptions = { + events: { + data: null, + types: null, + xaxis: 1, + position: 'BOTTOM', + }, +}; + +$.plot.plugins.push({ + init: init, + options: defaultOptions, + name: 'events', + version: '0.2.5', +}); From b2027af4cb3cdca3b84e3bf7547bf90ab38a240e Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Tue, 10 Apr 2018 14:16:56 +0200 Subject: [PATCH 002/263] wrote classes --- .../plugins/panel/graph/jquery.flot.events.ts | 264 +++++++++--------- 1 file changed, 136 insertions(+), 128 deletions(-) diff --git a/public/app/plugins/panel/graph/jquery.flot.events.ts b/public/app/plugins/panel/graph/jquery.flot.events.ts index 642883ff75c4..9dfe0a8573f3 100644 --- a/public/app/plugins/panel/graph/jquery.flot.events.ts +++ b/public/app/plugins/panel/graph/jquery.flot.events.ts @@ -1,9 +1,10 @@ +import angular from 'angular'; import $ from 'jquery'; import _ from 'lodash'; -import angular from 'angular'; import Drop from 'tether-drop'; -function createAnnotationToolip(element, event, plot) { +/** @ngInject */ +export function createAnnotationToolip(element, event, plot) { let injector = angular.element(document).injector(); let content = document.createElement('div'); content.innerHTML = ''; @@ -48,7 +49,8 @@ function createAnnotationToolip(element, event, plot) { let markerElementToAttachTo = null; -function createEditPopover(element, event, plot) { +/** @ngInject */ +export function createEditPopover(element, event, plot) { let 
eventManager = plot.getOptions().events.manager; if (eventManager.editorOpen) { // update marker element to attach to (needed in case of legend on the right @@ -129,106 +131,130 @@ function createEditPopover(element, event, plot) { /** * A class that allows for the drawing an remove of some object */ -let DrawableEvent = function(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { - let _object = object; - let _drawFunc = drawFunc; - let _clearFunc = clearFunc; - let _moveFunc = moveFunc; - let _position = { left: left, top: top }; - let _width = width; - let _height = height; - - this.width = function() { - return _width; - }; - this.height = function() { - return _height; - }; - this.position = function() { - return _position; - }; - this.draw = function() { - _drawFunc(_object); - }; - this.clear = function() { - _clearFunc(_object); - }; - this.getObject = function() { - return _object; - }; - this.moveTo = function(position) { - _position = position; - _moveFunc(_object, _position); - }; -}; +export class DrawableEvent { + _object: any; + _drawFunc: any; + _clearFunc: any; + _moveFunc: any; + _position: any; + _width: any; + _height: any; + + /** @ngInject */ + constructor(object, drawFunc, clearFunc, moveFunc, left, top, width, height) { + this._object = object; + this._drawFunc = drawFunc; + this._clearFunc = clearFunc; + this._moveFunc = moveFunc; + this._position = { left: left, top: top }; + this._width = width; + this._height = height; + } + + width() { + return this._width; + } + height() { + return this._height; + } + position() { + return this._position; + } + draw() { + this._drawFunc(this._object); + } + clear() { + this._clearFunc(this._object); + } + getObject() { + return this._object; + } + moveTo(position) { + this._position = position; + this._moveFunc(this._object, this._position); + } +} /** * Event class that stores options (eventType, min, max, title, description) and the object to draw. 
*/ -let VisualEvent = function(options, drawableEvent) { - let _parent; - let _options = options; - let _drawableEvent = drawableEvent; - let _hidden = false; - - this.visual = function() { - return _drawableEvent; - }; - this.getOptions = function() { - return _options; - }; - this.getParent = function() { - return _parent; - }; - this.isHidden = function() { - return _hidden; - }; - this.hide = function() { - _hidden = true; - }; - this.unhide = function() { - _hidden = false; - }; -}; +export class VisualEvent { + _parent: any; + _options: any; + _drawableEvent: any; + _hidden: any; + + /** @ngInject */ + constructor(options, drawableEvent) { + this._options = options; + this._drawableEvent = drawableEvent; + this._hidden = false; + } + + visual() { + return this._drawableEvent; + } + getOptions() { + return this._options; + } + getParent() { + return this._parent; + } + isHidden() { + return this._hidden; + } + hide() { + this._hidden = true; + } + unhide() { + this._hidden = false; + } +} /** * A Class that handles the event-markers inside the given plot */ -let EventMarkers = function(plot) { - let _events = []; - - this._types = []; - this._plot = plot; - this.eventsEnabled = false; +export class EventMarkers { + _events: any; + _types: any; + _plot: any; + eventsEnabled: any; + + /** @ngInject */ + constructor(plot) { + this._events = []; + this._types = []; + this._plot = plot; + this.eventsEnabled = false; + } - this.getEvents = function() { - return _events; - }; + getEvents() { + return this._events; + } - this.setTypes = function(types) { + setTypes(types) { return (this._types = types); - }; + } /** * create internal objects for the given events */ - this.setupEvents = function(events) { - let that = this; + setupEvents(events) { let parts = _.partition(events, 'isRegion'); let regions = parts[0]; events = parts[1]; - $.each(events, function(index, event) { - let ve = new VisualEvent(event, that._buildDiv(event)); - _events.push(ve); + $.each(events, 
(index, event) => { + let ve = new VisualEvent(event, this._buildDiv(event)); + this._events.push(ve); }); - $.each(regions, function(index, event) { - let vre = new VisualEvent(event, that._buildRegDiv(event)); - _events.push(vre); + $.each(regions, (index, event) => { + let vre = new VisualEvent(event, this._buildRegDiv(event)); + this._events.push(vre); }); - _events.sort(function(a, b) { + this._events.sort((a, b) => { let ao = a.getOptions(), bo = b.getOptions(); if (ao.min > bo.min) { @@ -239,18 +265,17 @@ let EventMarkers = function(plot) { } return 0; }); - }; + } /** * draw the events to the plot */ - this.drawEvents = function() { - let that = this; - // let o = this._plot.getPlotOffset(); + drawEvents() { + // var o = this._plot.getPlotOffset(); - $.each(_events, function(index, event) { + $.each(this._events, (index, event) => { // check event is inside the graph range - if (that._insidePlot(event.getOptions().min) && !event.isHidden()) { + if (this._insidePlot(event.getOptions().min) && !event.isHidden()) { event.visual().draw(); } else { event @@ -259,56 +284,46 @@ let EventMarkers = function(plot) { .hide(); } }); - }; + } /** * update the position of the event-markers (e.g. 
after scrolling or zooming) */ - this.updateEvents = function() { - let that = this; + updateEvents() { let o = this._plot.getPlotOffset(), left, top; let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - $.each(_events, function(index, event) { - top = o.top + that._plot.height() - event.visual().height(); + $.each(this._events, (index, event) => { + top = o.top + this._plot.height() - event.visual().height(); left = xaxis.p2c(event.getOptions().min) + o.left - event.visual().width() / 2; event.visual().moveTo({ top: top, left: left }); }); - }; + } /** * remove all events from the plot */ - this._clearEvents = function() { - $.each(_events, function(index, val) { + _clearEvents() { + $.each(this._events, (index, val) => { val.visual().clear(); }); - _events = []; - }; + this._events = []; + } /** * create a DOM element for the given event */ - this._buildDiv = function(event) { + _buildDiv(event) { let that = this; let container = this._plot.getPlaceholder(); let o = this._plot.getPlotOffset(); - let axes = this._plot.getAxes(); let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - let yaxis, top, left, color, markerSize, markerShow, lineStyle, lineWidth; + let top, left, color, markerSize, markerShow, lineStyle, lineWidth; let markerTooltip; - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { - yaxis = axes.yaxis; - } - if (axes.yaxis2 && axes.yaxis2.used) { - yaxis = axes.yaxis2; - } - // map the eventType to a types object let eventTypeId = event.eventType; @@ -444,27 +459,18 @@ let EventMarkers = function(plot) { ); return drawableEvent; - }; + } /** * create a DOM element for the given region */ - this._buildRegDiv = function(event) { + _buildRegDiv(event) { let that = this; let container = this._plot.getPlaceholder(); let o = this._plot.getPlotOffset(); - let axes = this._plot.getAxes(); let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; - let yaxis, top, left, 
lineWidth, regionWidth, lineStyle, color, markerTooltip; - - // determine the y axis used - if (axes.yaxis && axes.yaxis.used) { - yaxis = axes.yaxis; - } - if (axes.yaxis2 && axes.yaxis2.used) { - yaxis = axes.yaxis2; - } + let top, left, lineWidth, regionWidth, lineStyle, color, markerTooltip; // map the eventType to a types object let eventTypeId = event.eventType; @@ -502,14 +508,14 @@ let EventMarkers = function(plot) { let right = xaxis.p2c(timeTo) + o.left; regionWidth = right - left; - _.each([left, right], function(position) { + _.each([left, right], position => { let line = $('
').css({ position: 'absolute', opacity: 0.8, left: position + 'px', top: 8, width: lineWidth + 'px', - height: that._plot.height() + topOffset, + height: this._plot.height() + topOffset, 'border-left-width': lineWidth + 'px', 'border-left-style': lineStyle, 'border-left-color': color, @@ -573,22 +579,24 @@ let EventMarkers = function(plot) { ); return drawableEvent; - }; + } /** * check if the event is inside visible range */ - this._insidePlot = function(x) { + _insidePlot(x) { let xaxis = this._plot.getXAxes()[this._plot.getOptions().events.xaxis - 1]; let xc = xaxis.p2c(x); return xc > 0 && xc < xaxis.p2c(xaxis.max); - }; -}; + } +} /** * initialize the plugin for the given plot */ -function init(plot) { + +/** @ngInject */ +export function init(plot) { /*jshint validthis:true */ let that = this; let eventMarkers = new EventMarkers(plot); @@ -598,7 +606,7 @@ function init(plot) { }; plot.hideEvents = function() { - $.each(eventMarkers._events, function(index, event) { + $.each(eventMarkers._events, (index, event) => { event .visual() .getObject() @@ -608,7 +616,7 @@ function init(plot) { plot.showEvents = function() { plot.hideEvents(); - $.each(eventMarkers._events, function(index, event) { + $.each(eventMarkers._events, (index, event) => { event.hide(); }); From 0425b47791febe064b33c2ef2e75a7e6217877c7 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Mon, 11 Jun 2018 17:36:45 +0200 Subject: [PATCH 003/263] refactor Explore query field --- public/app/containers/Explore/QueryField.tsx | 26 ++-- public/app/containers/Explore/QueryRows.tsx | 4 + .../Explore/slate-plugins/prism/index.tsx | 21 +-- public/sass/_grafana.scss | 1 + public/sass/components/_slate_editor.scss | 146 +++++++++++++++++ public/sass/pages/_explore.scss | 147 ------------------ 6 files changed, 178 insertions(+), 167 deletions(-) create mode 100644 public/sass/components/_slate_editor.scss diff --git a/public/app/containers/Explore/QueryField.tsx 
b/public/app/containers/Explore/QueryField.tsx index 53354584fea8..c0d51ad9cfc9 100644 --- a/public/app/containers/Explore/QueryField.tsx +++ b/public/app/containers/Explore/QueryField.tsx @@ -9,7 +9,7 @@ import { getNextCharacter, getPreviousCousin } from './utils/dom'; import BracesPlugin from './slate-plugins/braces'; import ClearPlugin from './slate-plugins/clear'; import NewlinePlugin from './slate-plugins/newline'; -import PluginPrism, { configurePrismMetricsTokens } from './slate-plugins/prism/index'; +import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index'; import RunnerPlugin from './slate-plugins/runner'; import debounce from './utils/debounce'; import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus'; @@ -17,13 +17,13 @@ import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus'; import Typeahead from './Typeahead'; const EMPTY_METRIC = ''; -const TYPEAHEAD_DEBOUNCE = 300; +export const TYPEAHEAD_DEBOUNCE = 300; function flattenSuggestions(s) { return s ? 
s.reduce((acc, g) => acc.concat(g.items), []) : []; } -const getInitialValue = query => +export const getInitialValue = query => Value.fromJSON({ document: { nodes: [ @@ -45,12 +45,14 @@ const getInitialValue = query => }, }); -class Portal extends React.Component { +class Portal extends React.Component { node: any; + constructor(props) { super(props); + const { index = 0, prefix = 'query' } = props; this.node = document.createElement('div'); - this.node.classList.add('explore-typeahead', `explore-typeahead-${props.index}`); + this.node.classList.add(`slate-typeahead`, `slate-typeahead-${prefix}-${index}`); document.body.appendChild(this.node); } @@ -71,12 +73,14 @@ class QueryField extends React.Component { constructor(props, context) { super(props, context); + const { prismDefinition = {}, prismLanguage = 'promql' } = props; + this.plugins = [ BracesPlugin(), ClearPlugin(), RunnerPlugin({ handler: props.onPressEnter }), NewlinePlugin(), - PluginPrism(), + PluginPrism({ definition: prismDefinition, language: prismLanguage }), ]; this.state = { @@ -131,7 +135,8 @@ class QueryField extends React.Component { if (!this.state.metrics) { return; } - configurePrismMetricsTokens(this.state.metrics); + setPrismTokens(this.props.language, 'metrics', this.state.metrics); + // Trigger re-render window.requestAnimationFrame(() => { // Bogus edit to trigger highlighting @@ -162,7 +167,7 @@ class QueryField extends React.Component { const selection = window.getSelection(); if (selection.anchorNode) { const wrapperNode = selection.anchorNode.parentElement; - const editorNode = wrapperNode.closest('.query-field'); + const editorNode = wrapperNode.closest('.slate-query-field'); if (!editorNode || this.state.value.isBlurred) { // Not inside this editor return; @@ -514,6 +519,7 @@ class QueryField extends React.Component { }; renderMenu = () => { + const { portalPrefix } = this.props; const { suggestions } = this.state; const hasSuggesstions = suggestions && suggestions.length > 0; 
if (!hasSuggesstions) { @@ -528,7 +534,7 @@ class QueryField extends React.Component { // Create typeahead in DOM root so we can later position it absolutely return ( - + { render() { return ( -
+
{this.renderMenu()} { @@ -58,9 +59,12 @@ class QueryRow extends PureComponent {
diff --git a/public/app/containers/Explore/slate-plugins/prism/index.tsx b/public/app/containers/Explore/slate-plugins/prism/index.tsx index 7c3fa296d8ec..d185518790f5 100644 --- a/public/app/containers/Explore/slate-plugins/prism/index.tsx +++ b/public/app/containers/Explore/slate-plugins/prism/index.tsx @@ -1,16 +1,12 @@ import React from 'react'; import Prism from 'prismjs'; -import Promql from './promql'; - -Prism.languages.promql = Promql; - const TOKEN_MARK = 'prism-token'; -export function configurePrismMetricsTokens(metrics) { - Prism.languages.promql.metric = { - alias: 'variable', - pattern: new RegExp(`(?:^|\\s)(${metrics.join('|')})(?:$|\\s)`), +export function setPrismTokens(language, field, values, alias = 'variable') { + Prism.languages[language][field] = { + alias, + pattern: new RegExp(`(?:^|\\s)(${values.join('|')})(?:$|\\s)`), }; } @@ -21,7 +17,12 @@ export function configurePrismMetricsTokens(metrics) { * (Adapted to handle nested grammar definitions.) */ -export default function PrismPlugin() { +export default function PrismPlugin({ definition, language }) { + if (definition) { + // Don't override exising modified definitions + Prism.languages[language] = Prism.languages[language] || definition; + } + return { /** * Render a Slate mark with appropiate CSS class names @@ -54,7 +55,7 @@ export default function PrismPlugin() { const texts = node.getTexts().toArray(); const tstring = texts.map(t => t.text).join('\n'); - const grammar = Prism.languages.promql; + const grammar = Prism.languages[language]; const tokens = Prism.tokenize(tstring, grammar); const decorations = []; let startText = texts.shift(); diff --git a/public/sass/_grafana.scss b/public/sass/_grafana.scss index afc869f8b154..9e3bec267edf 100644 --- a/public/sass/_grafana.scss +++ b/public/sass/_grafana.scss @@ -67,6 +67,7 @@ @import 'components/filter-list'; @import 'components/filter-table'; @import 'components/old_stuff'; +@import 'components/slate_editor'; @import 
'components/typeahead'; @import 'components/modals'; @import 'components/dropdown'; diff --git a/public/sass/components/_slate_editor.scss b/public/sass/components/_slate_editor.scss new file mode 100644 index 000000000000..7e832b0f0b80 --- /dev/null +++ b/public/sass/components/_slate_editor.scss @@ -0,0 +1,146 @@ +.slate-query-field { + font-size: $font-size-root; + font-family: $font-family-monospace; + height: auto; +} + +.slate-query-field-wrapper { + position: relative; + display: inline-block; + padding: 6px 7px 4px; + width: 100%; + cursor: text; + line-height: $line-height-base; + color: $text-color-weak; + background-color: $panel-bg; + background-image: none; + border: $panel-border; + border-radius: $border-radius; + transition: all 0.3s; +} + +.slate-typeahead { + .typeahead { + position: absolute; + z-index: auto; + top: -10000px; + left: -10000px; + opacity: 0; + border-radius: $border-radius; + transition: opacity 0.75s; + border: $panel-border; + max-height: calc(66vh); + overflow-y: scroll; + max-width: calc(66%); + overflow-x: hidden; + outline: none; + list-style: none; + background: $panel-bg; + color: $text-color; + transition: opacity 0.4s ease-out; + box-shadow: $typeahead-shadow; + } + + .typeahead-group__title { + color: $text-color-weak; + font-size: $font-size-sm; + line-height: $line-height-base; + padding: $input-padding-y $input-padding-x; + } + + .typeahead-item { + height: auto; + font-family: $font-family-monospace; + padding: $input-padding-y $input-padding-x; + padding-left: $input-padding-x-lg; + font-size: $font-size-sm; + text-overflow: ellipsis; + overflow: hidden; + z-index: 1; + display: block; + white-space: nowrap; + cursor: pointer; + transition: color 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), border-color 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), + background 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), padding 0.15s cubic-bezier(0.645, 0.045, 0.355, 1); + } + + .typeahead-item__selected { + background-color: 
$typeahead-selected-bg; + color: $typeahead-selected-color; + } +} + +/* SYNTAX */ + +.slate-query-field { + .token.comment, + .token.block-comment, + .token.prolog, + .token.doctype, + .token.cdata { + color: $text-color-weak; + } + + .token.punctuation { + color: $text-color-weak; + } + + .token.property, + .token.tag, + .token.boolean, + .token.number, + .token.function-name, + .token.constant, + .token.symbol, + .token.deleted { + color: $query-red; + } + + .token.selector, + .token.attr-name, + .token.string, + .token.char, + .token.function, + .token.builtin, + .token.inserted { + color: $query-green; + } + + .token.operator, + .token.entity, + .token.url, + .token.variable { + color: $query-purple; + } + + .token.atrule, + .token.attr-value, + .token.keyword, + .token.class-name { + color: $query-blue; + } + + .token.regex, + .token.important { + color: $query-orange; + } + + .token.important { + font-weight: normal; + } + + .token.bold { + font-weight: bold; + } + .token.italic { + font-style: italic; + } + + .token.entity { + cursor: help; + } + + .namespace { + opacity: 0.7; + } +} diff --git a/public/sass/pages/_explore.scss b/public/sass/pages/_explore.scss index 7dacccf6a874..876260c4f76a 100644 --- a/public/sass/pages/_explore.scss +++ b/public/sass/pages/_explore.scss @@ -93,150 +93,3 @@ .query-row-tools { width: 4rem; } - -.query-field { - font-size: $font-size-root; - font-family: $font-family-monospace; - height: auto; -} - -.query-field-wrapper { - position: relative; - display: inline-block; - padding: 6px 7px 4px; - width: 100%; - cursor: text; - line-height: $line-height-base; - color: $text-color-weak; - background-color: $panel-bg; - background-image: none; - border: $panel-border; - border-radius: $border-radius; - transition: all 0.3s; -} - -.explore-typeahead { - .typeahead { - position: absolute; - z-index: auto; - top: -10000px; - left: -10000px; - opacity: 0; - border-radius: $border-radius; - transition: opacity 0.75s; - border: 
$panel-border; - max-height: calc(66vh); - overflow-y: scroll; - max-width: calc(66%); - overflow-x: hidden; - outline: none; - list-style: none; - background: $panel-bg; - color: $text-color; - transition: opacity 0.4s ease-out; - box-shadow: $typeahead-shadow; - } - - .typeahead-group__title { - color: $text-color-weak; - font-size: $font-size-sm; - line-height: $line-height-base; - padding: $input-padding-y $input-padding-x; - } - - .typeahead-item { - height: auto; - font-family: $font-family-monospace; - padding: $input-padding-y $input-padding-x; - padding-left: $input-padding-x-lg; - font-size: $font-size-sm; - text-overflow: ellipsis; - overflow: hidden; - z-index: 1; - display: block; - white-space: nowrap; - cursor: pointer; - transition: color 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), border-color 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), - background 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), padding 0.15s cubic-bezier(0.645, 0.045, 0.355, 1); - } - - .typeahead-item__selected { - background-color: $typeahead-selected-bg; - color: $typeahead-selected-color; - } -} - -/* SYNTAX */ - -.explore { - .token.comment, - .token.block-comment, - .token.prolog, - .token.doctype, - .token.cdata { - color: $text-color-weak; - } - - .token.punctuation { - color: $text-color-weak; - } - - .token.property, - .token.tag, - .token.boolean, - .token.number, - .token.function-name, - .token.constant, - .token.symbol, - .token.deleted { - color: $query-red; - } - - .token.selector, - .token.attr-name, - .token.string, - .token.char, - .token.function, - .token.builtin, - .token.inserted { - color: $query-green; - } - - .token.operator, - .token.entity, - .token.url, - .token.variable { - color: $query-purple; - } - - .token.atrule, - .token.attr-value, - .token.keyword, - .token.class-name { - color: $query-blue; - } - - .token.regex, - .token.important { - color: $query-orange; - } - - .token.important { - font-weight: normal; - } - - .token.bold { - font-weight: bold; 
- } - .token.italic { - font-style: italic; - } - - .token.entity { - cursor: help; - } - - .namespace { - opacity: 0.7; - } -} From 4113f7db470ae53f7ec5c502d877e74e9e15f383 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Tue, 12 Jun 2018 12:31:21 +0200 Subject: [PATCH 004/263] Trigger typeahead on Ctrl-Space --- public/app/containers/Explore/QueryField.tsx | 55 +++++++++++++------- 1 file changed, 35 insertions(+), 20 deletions(-) diff --git a/public/app/containers/Explore/QueryField.tsx b/public/app/containers/Explore/QueryField.tsx index c0d51ad9cfc9..8b0287e9b378 100644 --- a/public/app/containers/Explore/QueryField.tsx +++ b/public/app/containers/Explore/QueryField.tsx @@ -335,20 +335,30 @@ class QueryField extends React.Component { } onKeyDown = (event, change) => { - if (this.menuEl) { - const { typeaheadIndex, suggestions } = this.state; - - switch (event.key) { - case 'Escape': { - if (this.menuEl) { - event.preventDefault(); - this.resetTypeahead(); - return true; - } - break; + const { typeaheadIndex, suggestions } = this.state; + + switch (event.key) { + case 'Escape': { + if (this.menuEl) { + event.preventDefault(); + event.stopPropagation(); + this.resetTypeahead(); + return true; + } + break; + } + + case ' ': { + if (event.ctrlKey) { + event.preventDefault(); + this.handleTypeahead(); + return true; } + break; + } - case 'Tab': { + case 'Tab': { + if (this.menuEl) { // Dont blur input event.preventDefault(); if (!suggestions || suggestions.length === 0) { @@ -364,25 +374,30 @@ class QueryField extends React.Component { this.applyTypeahead(change, suggestion); return true; } + break; + } - case 'ArrowDown': { + case 'ArrowDown': { + if (this.menuEl) { // Select next suggestion event.preventDefault(); this.setState({ typeaheadIndex: typeaheadIndex + 1 }); - break; } + break; + } - case 'ArrowUp': { + case 'ArrowUp': { + if (this.menuEl) { // Select previous suggestion event.preventDefault(); this.setState({ typeaheadIndex: Math.max(0, 
typeaheadIndex - 1) }); - break; } + break; + } - default: { - // console.log('default key', event.key, event.which, event.charCode, event.locale, data.key); - break; - } + default: { + // console.log('default key', event.key, event.which, event.charCode, event.locale, data.key); + break; } } return undefined; From 2ebda4bf4d6b9ac8b4983ad2ec6087dff7f50979 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Tue, 12 Jun 2018 14:53:09 +0200 Subject: [PATCH 005/263] Make suggestions an object --- public/app/containers/Explore/QueryField.tsx | 4 +++- public/app/containers/Explore/Typeahead.tsx | 14 +++++++++++--- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/public/app/containers/Explore/QueryField.tsx b/public/app/containers/Explore/QueryField.tsx index 8b0287e9b378..f241e9f062aa 100644 --- a/public/app/containers/Explore/QueryField.tsx +++ b/public/app/containers/Explore/QueryField.tsx @@ -545,7 +545,9 @@ class QueryField extends React.Component { let selectedIndex = Math.max(this.state.typeaheadIndex, 0); const flattenedSuggestions = flattenSuggestions(suggestions); selectedIndex = selectedIndex % flattenedSuggestions.length || 0; - const selectedKeys = flattenedSuggestions.length > 0 ? [flattenedSuggestions[selectedIndex]] : []; + const selectedKeys = (flattenedSuggestions.length > 0 ? [flattenedSuggestions[selectedIndex]] : []).map( + i => (typeof i === 'object' ? i.text : i) + ); // Create typeahead in DOM root so we can later position it absolutely return ( diff --git a/public/app/containers/Explore/Typeahead.tsx b/public/app/containers/Explore/Typeahead.tsx index 4943622fe4e0..e7f0a9a0dcef 100644 --- a/public/app/containers/Explore/Typeahead.tsx +++ b/public/app/containers/Explore/Typeahead.tsx @@ -41,9 +41,17 @@ class TypeaheadGroup extends React.PureComponent {
  • {label}
      - {items.map(item => ( - -1} label={item} /> - ))} + {items.map(item => { + const text = typeof item === 'object' ? item.text : item; + return ( + -1} + label={text} + /> + ); + })}
  • ); From 73ddf2c3ed5d119ea97463871256a60714300dc2 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Tue, 12 Jun 2018 17:12:03 +0200 Subject: [PATCH 006/263] hint support for typeahead --- public/app/containers/Explore/Typeahead.tsx | 7 +++++-- public/sass/components/_slate_editor.scss | 5 +++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/public/app/containers/Explore/Typeahead.tsx b/public/app/containers/Explore/Typeahead.tsx index e7f0a9a0dcef..44fce7f8c7eb 100644 --- a/public/app/containers/Explore/Typeahead.tsx +++ b/public/app/containers/Explore/Typeahead.tsx @@ -23,12 +23,13 @@ class TypeaheadItem extends React.PureComponent { }; render() { - const { isSelected, label, onClickItem } = this.props; + const { hint, isSelected, label, onClickItem } = this.props; const className = isSelected ? 'typeahead-item typeahead-item__selected' : 'typeahead-item'; const onClick = () => onClickItem(label); return (
  • {label} + {hint && isSelected ?
    {hint}
    : null}
  • ); } @@ -43,12 +44,14 @@ class TypeaheadGroup extends React.PureComponent {
      {items.map(item => { const text = typeof item === 'object' ? item.text : item; + const label = typeof item === 'object' ? item.display || item.text : item; return ( -1} - label={text} + hint={item.hint} + label={label} /> ); })} diff --git a/public/sass/components/_slate_editor.scss b/public/sass/components/_slate_editor.scss index 7e832b0f0b80..de8a6e6d7214 100644 --- a/public/sass/components/_slate_editor.scss +++ b/public/sass/components/_slate_editor.scss @@ -67,6 +67,11 @@ .typeahead-item__selected { background-color: $typeahead-selected-bg; color: $typeahead-selected-color; + + .typeahead-item-hint { + font-size: $font-size-xs; + color: $text-color; + } } } From b2f497b100b7b94a07a22d594b26cf14e89273ae Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Tue, 12 Jun 2018 19:02:02 +0200 Subject: [PATCH 007/263] batch DOM reads from query field typeahead --- public/app/containers/Explore/QueryField.tsx | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/public/app/containers/Explore/QueryField.tsx b/public/app/containers/Explore/QueryField.tsx index f241e9f062aa..bbfbb067d0ba 100644 --- a/public/app/containers/Explore/QueryField.tsx +++ b/public/app/containers/Explore/QueryField.tsx @@ -522,10 +522,17 @@ class QueryField extends React.Component { // Align menu overlay to editor node if (node) { + // Read from DOM const rect = node.parentElement.getBoundingClientRect(); - menu.style.opacity = 1; - menu.style.top = `${rect.top + window.scrollY + rect.height + 4}px`; - menu.style.left = `${rect.left + window.scrollX - 2}px`; + const scrollX = window.scrollX; + const scrollY = window.scrollY; + + // Write DOM + requestAnimationFrame(() => { + menu.style.opacity = 1; + menu.style.top = `${rect.top + scrollY + rect.height + 4}px`; + menu.style.left = `${rect.left + scrollX - 2}px`; + }); } }; From bbe6ab509639c6f3ddffe0715e95ce52aad3fb4c Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Thu, 14 Jun 2018 10:31:09 +0100 
Subject: [PATCH 008/263] Fix Queryfield metrics field missing --- public/app/containers/Explore/QueryField.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/containers/Explore/QueryField.tsx b/public/app/containers/Explore/QueryField.tsx index bbfbb067d0ba..bedb955b9b9e 100644 --- a/public/app/containers/Explore/QueryField.tsx +++ b/public/app/containers/Explore/QueryField.tsx @@ -135,7 +135,7 @@ class QueryField extends React.Component { if (!this.state.metrics) { return; } - setPrismTokens(this.props.language, 'metrics', this.state.metrics); + setPrismTokens(this.props.prismLanguage, 'metrics', this.state.metrics); // Trigger re-render window.requestAnimationFrame(() => { From a9e1e5f346cf23f1ae6a137e3560e72d61f278fb Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Thu, 14 Jun 2018 11:41:01 +0100 Subject: [PATCH 009/263] Fix queryfield wrapper css --- public/app/containers/Explore/QueryRows.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/containers/Explore/QueryRows.tsx b/public/app/containers/Explore/QueryRows.tsx index 24079ac0f690..a968e1e2c641 100644 --- a/public/app/containers/Explore/QueryRows.tsx +++ b/public/app/containers/Explore/QueryRows.tsx @@ -56,7 +56,7 @@ class QueryRow extends PureComponent {
    -
    +
    Date: Fri, 15 Jun 2018 10:11:32 -0300 Subject: [PATCH 010/263] Adding Cloudwatch AWS/AppSync metrics and dimensions --- pkg/tsdb/cloudwatch/metric_find_query.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index 136ee241c2e5..12c2aba4681b 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -86,6 +86,7 @@ func init() { "AWS/Kinesis": {"GetRecords.Bytes", "GetRecords.IteratorAge", "GetRecords.IteratorAgeMilliseconds", "GetRecords.Latency", "GetRecords.Records", "GetRecords.Success", "IncomingBytes", "IncomingRecords", "PutRecord.Bytes", "PutRecord.Latency", "PutRecord.Success", "PutRecords.Bytes", "PutRecords.Latency", "PutRecords.Records", "PutRecords.Success", "ReadProvisionedThroughputExceeded", "WriteProvisionedThroughputExceeded", "IteratorAgeMilliseconds", "OutgoingBytes", "OutgoingRecords"}, "AWS/KinesisAnalytics": {"Bytes", "MillisBehindLatest", "Records", "Success"}, "AWS/Lambda": {"Invocations", "Errors", "Duration", "Throttles", "IteratorAge"}, + "AWS/AppSync": {"Latency", "4XXError", "5XXError"}, "AWS/Logs": {"IncomingBytes", "IncomingLogEvents", "ForwardedBytes", "ForwardedLogEvents", "DeliveryErrors", "DeliveryThrottling"}, "AWS/ML": {"PredictCount", "PredictFailureCount"}, "AWS/NATGateway": {"PacketsOutToDestination", "PacketsOutToSource", "PacketsInFromSource", "PacketsInFromDestination", "BytesOutToDestination", "BytesOutToSource", "BytesInFromSource", "BytesInFromDestination", "ErrorPortAllocation", "ActiveConnectionCount", "ConnectionAttemptCount", "ConnectionEstablishedCount", "IdleTimeoutCount", "PacketsDropCount"}, @@ -135,6 +136,7 @@ func init() { "AWS/Kinesis": {"StreamName", "ShardId"}, "AWS/KinesisAnalytics": {"Flow", "Id", "Application"}, "AWS/Lambda": {"FunctionName", "Resource", "Version", "Alias"}, + "AWS/AppSync": {"GraphQLAPIId"}, "AWS/Logs": {"LogGroupName", "DestinationType", 
"FilterName"}, "AWS/ML": {"MLModelId", "RequestMode"}, "AWS/NATGateway": {"NatGatewayId"}, From 1dd65f7a39f0f25985551bc4ebff671f70493a5a Mon Sep 17 00:00:00 2001 From: Aman Date: Wed, 9 May 2018 13:32:44 +0530 Subject: [PATCH 011/263] Add options to colorize prefix and postfix in singlestat --- .../app/plugins/panel/singlestat/editor.html | 4 ++++ public/app/plugins/panel/singlestat/module.ts | 19 ++++++++++++------- 2 files changed, 16 insertions(+), 7 deletions(-) diff --git a/public/app/plugins/panel/singlestat/editor.html b/public/app/plugins/panel/singlestat/editor.html index 15f4e6a9efab..3b75cf87eaee 100644 --- a/public/app/plugins/panel/singlestat/editor.html +++ b/public/app/plugins/panel/singlestat/editor.html @@ -58,6 +58,10 @@
    Coloring
    +
    + + +
    @@ -39,7 +39,7 @@
    Value
    - +
    @@ -59,8 +59,8 @@
    Coloring
    - - + +
    diff --git a/public/app/plugins/panel/singlestat/module.ts b/public/app/plugins/panel/singlestat/module.ts index 0cf3a3a95a7d..ebd2628b0864 100644 --- a/public/app/plugins/panel/singlestat/module.ts +++ b/public/app/plugins/panel/singlestat/module.ts @@ -198,8 +198,8 @@ class SingleStatCtrl extends MetricsPanelCtrl { this.setValueMapping(data); } - canChangeFontSize() { - return this.panel.gauge.show; + canModifyText() { + return !this.panel.gauge.show; } setColoring(options) { From c2381f088f69637c48ee8ced84d8604e71436013 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 28 Jun 2018 12:02:49 +0200 Subject: [PATCH 014/263] Add disabled styles for checked checkbox (#12422) --- public/sass/components/_switch.scss | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/public/sass/components/_switch.scss b/public/sass/components/_switch.scss index c7eb19141037..6eb01ecc32d2 100644 --- a/public/sass/components/_switch.scss +++ b/public/sass/components/_switch.scss @@ -64,8 +64,8 @@ } input + label::before { - font-family: "FontAwesome"; - content: "\f096"; // square-o + font-family: 'FontAwesome'; + content: '\f096'; // square-o color: $text-color-weak; transition: transform 0.4s; backface-visibility: hidden; @@ -73,11 +73,11 @@ } input + label::after { - content: "\f046"; // check-square-o + content: '\f046'; // check-square-o color: $orange; text-shadow: $text-shadow-strong; - font-family: "FontAwesome"; + font-family: 'FontAwesome'; transition: transform 0.4s; transform: rotateY(180deg); backface-visibility: hidden; @@ -154,7 +154,8 @@ gf-form-switch[disabled] { .gf-form-switch input + label { cursor: default; pointer-events: none !important; - &::before { + &::before, + &::after { color: $text-color-faint; text-shadow: none; } From 3056d9a80eda8414539ff9c96ba2f32a219d2149 Mon Sep 17 00:00:00 2001 From: Dan Cech Date: Thu, 28 Jun 2018 12:08:32 +0200 Subject: [PATCH 015/263] support passing api token in Basic auth password (#12416) 
--- docs/sources/http_api/auth.md | 8 ++++++++ pkg/middleware/auth.go | 6 ++++++ pkg/middleware/middleware_test.go | 26 ++++++++++++++++++++++++-- 3 files changed, 38 insertions(+), 2 deletions(-) diff --git a/docs/sources/http_api/auth.md b/docs/sources/http_api/auth.md index 166a5a4fdb92..8ff40b5ef04e 100644 --- a/docs/sources/http_api/auth.md +++ b/docs/sources/http_api/auth.md @@ -44,6 +44,14 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk The `Authorization` header value should be `Bearer `. +The API Token can also be passed as a Basic authorization password with the special username `api_key`: + +curl example: +```bash +?curl http://api_key:eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk@localhost:3000/api/org +{"id":1,"name":"Main Org."} +``` + # Auth HTTP resources / actions ## Api Keys diff --git a/pkg/middleware/auth.go b/pkg/middleware/auth.go index 37e79c010713..5faee1e3fa7a 100644 --- a/pkg/middleware/auth.go +++ b/pkg/middleware/auth.go @@ -9,6 +9,7 @@ import ( m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" ) type AuthOptions struct { @@ -34,6 +35,11 @@ func getApiKey(c *m.ReqContext) string { return key } + username, password, err := util.DecodeBasicAuthHeader(header) + if err == nil && username == "api_key" { + return password + } + return "" } diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go index 0b50358ad737..d82b7313585c 100644 --- a/pkg/middleware/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -82,7 +82,7 @@ func TestMiddlewareContext(t *testing.T) { setting.BasicAuthEnabled = true authHeader := util.GetBasicAuthHeader("myUser", "myPass") - sc.fakeReq("GET", "/").withAuthoriziationHeader(authHeader).exec() + sc.fakeReq("GET", "/").withAuthorizationHeader(authHeader).exec() Convey("Should init middleware context with user", func() { 
So(sc.context.IsSignedIn, ShouldEqual, true) @@ -128,6 +128,28 @@ func TestMiddlewareContext(t *testing.T) { }) }) + middlewareScenario("Valid api key via Basic auth", func(sc *scenarioContext) { + keyhash := util.EncodePassword("v5nAwpMafFP6znaS4urhdWDLS5511M42", "asd") + + bus.AddHandler("test", func(query *m.GetApiKeyByNameQuery) error { + query.Result = &m.ApiKey{OrgId: 12, Role: m.ROLE_EDITOR, Key: keyhash} + return nil + }) + + authHeader := util.GetBasicAuthHeader("api_key", "eyJrIjoidjVuQXdwTWFmRlA2em5hUzR1cmhkV0RMUzU1MTFNNDIiLCJuIjoiYXNkIiwiaWQiOjF9") + sc.fakeReq("GET", "/").withAuthorizationHeader(authHeader).exec() + + Convey("Should return 200", func() { + So(sc.resp.Code, ShouldEqual, 200) + }) + + Convey("Should init middleware context", func() { + So(sc.context.IsSignedIn, ShouldEqual, true) + So(sc.context.OrgId, ShouldEqual, 12) + So(sc.context.OrgRole, ShouldEqual, m.ROLE_EDITOR) + }) + }) + middlewareScenario("UserId in session", func(sc *scenarioContext) { sc.fakeReq("GET", "/").handler(func(c *m.ReqContext) { @@ -473,7 +495,7 @@ func (sc *scenarioContext) withInvalidApiKey() *scenarioContext { return sc } -func (sc *scenarioContext) withAuthoriziationHeader(authHeader string) *scenarioContext { +func (sc *scenarioContext) withAuthorizationHeader(authHeader string) *scenarioContext { sc.authHeader = authHeader return sc } From 443ff5deb4b010f321d41f04f676543c6c2bdb8e Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 28 Jun 2018 12:10:36 +0200 Subject: [PATCH 016/263] Karma to Jest: value_select_dropdown (#12435) * Begin Karma 2 Jest: value_select_dropdown * Add return for Promise * Remove Karma test --- .../core/directives/value_select_dropdown.ts | 4 +- .../core/specs/value_select_dropdown.jest.ts | 159 ++++++++++++++++ .../core/specs/value_select_dropdown_specs.ts | 171 ------------------ 3 files changed, 161 insertions(+), 173 deletions(-) create mode 100644 public/app/core/specs/value_select_dropdown.jest.ts delete mode 100644 
public/app/core/specs/value_select_dropdown_specs.ts diff --git a/public/app/core/directives/value_select_dropdown.ts b/public/app/core/directives/value_select_dropdown.ts index d6c6c3af5c50..d384904c2d85 100644 --- a/public/app/core/directives/value_select_dropdown.ts +++ b/public/app/core/directives/value_select_dropdown.ts @@ -93,7 +93,7 @@ export class ValueSelectDropdownCtrl { tagValuesPromise = this.$q.when(tag.values); } - tagValuesPromise.then(values => { + return tagValuesPromise.then(values => { tag.values = values; tag.valuesText = values.join(' + '); _.each(this.options, option => { @@ -132,7 +132,7 @@ export class ValueSelectDropdownCtrl { this.highlightIndex = (this.highlightIndex + direction) % this.search.options.length; } - selectValue(option, event, commitChange, excludeOthers) { + selectValue(option, event, commitChange?, excludeOthers?) { if (!option) { return; } diff --git a/public/app/core/specs/value_select_dropdown.jest.ts b/public/app/core/specs/value_select_dropdown.jest.ts new file mode 100644 index 000000000000..3cc310435b7f --- /dev/null +++ b/public/app/core/specs/value_select_dropdown.jest.ts @@ -0,0 +1,159 @@ +import 'app/core/directives/value_select_dropdown'; +import { ValueSelectDropdownCtrl } from '../directives/value_select_dropdown'; +import q from 'q'; + +describe('SelectDropdownCtrl', () => { + let tagValuesMap: any = {}; + + ValueSelectDropdownCtrl.prototype.onUpdated = jest.fn(); + let ctrl; + + describe('Given simple variable', () => { + beforeEach(() => { + ctrl = new ValueSelectDropdownCtrl(q); + ctrl.variable = { + current: { text: 'hej', value: 'hej' }, + getValuesForTag: key => { + return Promise.resolve(tagValuesMap[key]); + }, + }; + ctrl.init(); + }); + + it('Should init labelText and linkText', () => { + expect(ctrl.linkText).toBe('hej'); + }); + }); + + describe('Given variable with tags and dropdown is opened', () => { + beforeEach(() => { + ctrl = new ValueSelectDropdownCtrl(q); + ctrl.variable = { + current: { 
text: 'server-1', value: 'server-1' }, + options: [ + { text: 'server-1', value: 'server-1', selected: true }, + { text: 'server-2', value: 'server-2' }, + { text: 'server-3', value: 'server-3' }, + ], + tags: ['key1', 'key2', 'key3'], + getValuesForTag: key => { + return Promise.resolve(tagValuesMap[key]); + }, + multi: true, + }; + tagValuesMap.key1 = ['server-1', 'server-3']; + tagValuesMap.key2 = ['server-2', 'server-3']; + tagValuesMap.key3 = ['server-1', 'server-2', 'server-3']; + ctrl.init(); + ctrl.show(); + }); + + it('should init tags model', () => { + expect(ctrl.tags.length).toBe(3); + expect(ctrl.tags[0].text).toBe('key1'); + }); + + it('should init options model', () => { + expect(ctrl.options.length).toBe(3); + }); + + it('should init selected values array', () => { + expect(ctrl.selectedValues.length).toBe(1); + }); + + it('should set linkText', () => { + expect(ctrl.linkText).toBe('server-1'); + }); + + describe('after adititional value is selected', () => { + beforeEach(() => { + ctrl.selectValue(ctrl.options[2], {}); + ctrl.commitChanges(); + }); + + it('should update link text', () => { + expect(ctrl.linkText).toBe('server-1 + server-3'); + }); + }); + + describe('When tag is selected', () => { + beforeEach(async () => { + await ctrl.selectTag(ctrl.tags[0]); + ctrl.commitChanges(); + }); + + it('should select tag', () => { + expect(ctrl.selectedTags.length).toBe(1); + }); + + it('should select values', () => { + expect(ctrl.options[0].selected).toBe(true); + expect(ctrl.options[2].selected).toBe(true); + }); + + it('link text should not include tag values', () => { + expect(ctrl.linkText).toBe(''); + }); + + describe('and then dropdown is opened and closed without changes', () => { + beforeEach(() => { + ctrl.show(); + ctrl.commitChanges(); + }); + + it('should still have selected tag', () => { + expect(ctrl.selectedTags.length).toBe(1); + }); + }); + + describe('and then unselected', () => { + beforeEach(async () => { + await 
ctrl.selectTag(ctrl.tags[0]); + }); + + it('should deselect tag', () => { + expect(ctrl.selectedTags.length).toBe(0); + }); + }); + + describe('and then value is unselected', () => { + beforeEach(() => { + ctrl.selectValue(ctrl.options[0], {}); + }); + + it('should deselect tag', () => { + expect(ctrl.selectedTags.length).toBe(0); + }); + }); + }); + }); + + describe('Given variable with selected tags', () => { + beforeEach(() => { + ctrl = new ValueSelectDropdownCtrl(q); + ctrl.variable = { + current: { + text: 'server-1', + value: 'server-1', + tags: [{ text: 'key1', selected: true }], + }, + options: [ + { text: 'server-1', value: 'server-1' }, + { text: 'server-2', value: 'server-2' }, + { text: 'server-3', value: 'server-3' }, + ], + tags: ['key1', 'key2', 'key3'], + getValuesForTag: key => { + return Promise.resolve(tagValuesMap[key]); + }, + multi: true, + }; + ctrl.init(); + ctrl.show(); + }); + + it('should set tag as selected', () => { + expect(ctrl.tags[0].selected).toBe(true); + }); + }); +}); diff --git a/public/app/core/specs/value_select_dropdown_specs.ts b/public/app/core/specs/value_select_dropdown_specs.ts deleted file mode 100644 index 8f6408fb389c..000000000000 --- a/public/app/core/specs/value_select_dropdown_specs.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks, sinon } from 'test/lib/common'; -import 'app/core/directives/value_select_dropdown'; - -describe('SelectDropdownCtrl', function() { - var scope; - var ctrl; - var tagValuesMap: any = {}; - var rootScope; - var q; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach( - angularMocks.inject(function($controller, $rootScope, $q, $httpBackend) { - rootScope = $rootScope; - q = $q; - scope = $rootScope.$new(); - ctrl = $controller('ValueSelectDropdownCtrl', { $scope: scope }); - ctrl.onUpdated = sinon.spy(); - $httpBackend.when('GET', /\.html$/).respond(''); - }) - ); - - describe('Given simple variable', function() { - 
beforeEach(function() { - ctrl.variable = { - current: { text: 'hej', value: 'hej' }, - getValuesForTag: function(key) { - return q.when(tagValuesMap[key]); - }, - }; - ctrl.init(); - }); - - it('Should init labelText and linkText', function() { - expect(ctrl.linkText).to.be('hej'); - }); - }); - - describe('Given variable with tags and dropdown is opened', function() { - beforeEach(function() { - ctrl.variable = { - current: { text: 'server-1', value: 'server-1' }, - options: [ - { text: 'server-1', value: 'server-1', selected: true }, - { text: 'server-2', value: 'server-2' }, - { text: 'server-3', value: 'server-3' }, - ], - tags: ['key1', 'key2', 'key3'], - getValuesForTag: function(key) { - return q.when(tagValuesMap[key]); - }, - multi: true, - }; - tagValuesMap.key1 = ['server-1', 'server-3']; - tagValuesMap.key2 = ['server-2', 'server-3']; - tagValuesMap.key3 = ['server-1', 'server-2', 'server-3']; - ctrl.init(); - ctrl.show(); - }); - - it('should init tags model', function() { - expect(ctrl.tags.length).to.be(3); - expect(ctrl.tags[0].text).to.be('key1'); - }); - - it('should init options model', function() { - expect(ctrl.options.length).to.be(3); - }); - - it('should init selected values array', function() { - expect(ctrl.selectedValues.length).to.be(1); - }); - - it('should set linkText', function() { - expect(ctrl.linkText).to.be('server-1'); - }); - - describe('after adititional value is selected', function() { - beforeEach(function() { - ctrl.selectValue(ctrl.options[2], {}); - ctrl.commitChanges(); - }); - - it('should update link text', function() { - expect(ctrl.linkText).to.be('server-1 + server-3'); - }); - }); - - describe('When tag is selected', function() { - beforeEach(function() { - ctrl.selectTag(ctrl.tags[0]); - rootScope.$digest(); - ctrl.commitChanges(); - }); - - it('should select tag', function() { - expect(ctrl.selectedTags.length).to.be(1); - }); - - it('should select values', function() { - 
expect(ctrl.options[0].selected).to.be(true); - expect(ctrl.options[2].selected).to.be(true); - }); - - it('link text should not include tag values', function() { - expect(ctrl.linkText).to.be(''); - }); - - describe('and then dropdown is opened and closed without changes', function() { - beforeEach(function() { - ctrl.show(); - ctrl.commitChanges(); - rootScope.$digest(); - }); - - it('should still have selected tag', function() { - expect(ctrl.selectedTags.length).to.be(1); - }); - }); - - describe('and then unselected', function() { - beforeEach(function() { - ctrl.selectTag(ctrl.tags[0]); - rootScope.$digest(); - }); - - it('should deselect tag', function() { - expect(ctrl.selectedTags.length).to.be(0); - }); - }); - - describe('and then value is unselected', function() { - beforeEach(function() { - ctrl.selectValue(ctrl.options[0], {}); - }); - - it('should deselect tag', function() { - expect(ctrl.selectedTags.length).to.be(0); - }); - }); - }); - }); - - describe('Given variable with selected tags', function() { - beforeEach(function() { - ctrl.variable = { - current: { - text: 'server-1', - value: 'server-1', - tags: [{ text: 'key1', selected: true }], - }, - options: [ - { text: 'server-1', value: 'server-1' }, - { text: 'server-2', value: 'server-2' }, - { text: 'server-3', value: 'server-3' }, - ], - tags: ['key1', 'key2', 'key3'], - getValuesForTag: function(key) { - return q.when(tagValuesMap[key]); - }, - multi: true, - }; - ctrl.init(); - ctrl.show(); - }); - - it('should set tag as selected', function() { - expect(ctrl.tags[0].selected).to.be(true); - }); - }); -}); From 7a7c6f8fab04d622fda04d65fdca1a7914b3f5a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Thu, 28 Jun 2018 04:38:23 -0700 Subject: [PATCH 017/263] fix: log close/flush was done too early, before server shutdown log message was called, fixes #12438 --- pkg/cmd/grafana-server/main.go | 4 ---- pkg/cmd/grafana-server/server.go | 2 ++ 2 files changed, 2 
insertions(+), 4 deletions(-) diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index 976c027d749a..e64f42a73208 100644 --- a/pkg/cmd/grafana-server/main.go +++ b/pkg/cmd/grafana-server/main.go @@ -14,7 +14,6 @@ import ( "net/http" _ "net/http/pprof" - "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/setting" @@ -88,9 +87,6 @@ func main() { err := server.Run() - trace.Stop() - log.Close() - server.Exit(err) } diff --git a/pkg/cmd/grafana-server/server.go b/pkg/cmd/grafana-server/server.go index 6444528f7f04..a4543ef1f20d 100644 --- a/pkg/cmd/grafana-server/server.go +++ b/pkg/cmd/grafana-server/server.go @@ -185,6 +185,8 @@ func (g *GrafanaServerImpl) Exit(reason error) { } g.log.Error("Server shutdown", "reason", reason) + + log.Close() os.Exit(code) } From af0a4a60394821b605d03575f3e756c1f0c16b6e Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 28 Jun 2018 13:43:23 +0200 Subject: [PATCH 018/263] Karma to Jest: 3 test files (#12414) * Karma to Jest: viewstate_srv * Karma to Jest: annotations_srv * Remove comments * Karma to Jest: series_override_ctrl * Remove unnecessary code * Class to function and fix lint error * Fix ngInject --- ...s_srv_specs.ts => annotations_srv.jest.ts} | 22 +- .../dashboard/specs/viewstate_srv.jest.ts | 67 +++++ .../dashboard/specs/viewstate_srv_specs.ts | 65 ---- .../panel/graph/series_overrides_ctrl.ts | 280 +++++++++--------- .../graph/specs/series_override_ctrl.jest.ts | 42 +++ .../graph/specs/series_override_ctrl_specs.ts | 55 ---- 6 files changed, 259 insertions(+), 272 deletions(-) rename public/app/features/annotations/specs/{annotations_srv_specs.ts => annotations_srv.jest.ts} (52%) create mode 100644 public/app/features/dashboard/specs/viewstate_srv.jest.ts delete mode 100644 public/app/features/dashboard/specs/viewstate_srv_specs.ts create mode 100644 public/app/plugins/panel/graph/specs/series_override_ctrl.jest.ts delete mode 
100644 public/app/plugins/panel/graph/specs/series_override_ctrl_specs.ts diff --git a/public/app/features/annotations/specs/annotations_srv_specs.ts b/public/app/features/annotations/specs/annotations_srv.jest.ts similarity index 52% rename from public/app/features/annotations/specs/annotations_srv_specs.ts rename to public/app/features/annotations/specs/annotations_srv.jest.ts index 932fcf9415c7..7db7b6c9f05f 100644 --- a/public/app/features/annotations/specs/annotations_srv_specs.ts +++ b/public/app/features/annotations/specs/annotations_srv.jest.ts @@ -1,17 +1,17 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; import '../annotations_srv'; -import helpers from 'test/specs/helpers'; import 'app/features/dashboard/time_srv'; +import { AnnotationsSrv } from '../annotations_srv'; describe('AnnotationsSrv', function() { - var ctx = new helpers.ServiceTestContext(); + let $rootScope = { + onAppEvent: jest.fn(), + }; + let $q; + let datasourceSrv; + let backendSrv; + let timeSrv; - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach(ctx.createService('timeSrv')); - beforeEach(() => { - ctx.createService('annotationsSrv'); - }); + let annotationsSrv = new AnnotationsSrv($rootScope, $q, datasourceSrv, backendSrv, timeSrv); describe('When translating the query result', () => { const annotationSource = { @@ -30,11 +30,11 @@ describe('AnnotationsSrv', function() { let translatedAnnotations; beforeEach(() => { - translatedAnnotations = ctx.service.translateQueryResult(annotationSource, annotations); + translatedAnnotations = annotationsSrv.translateQueryResult(annotationSource, annotations); }); it('should set defaults', () => { - expect(translatedAnnotations[0].source).to.eql(annotationSource); + expect(translatedAnnotations[0].source).toEqual(annotationSource); }); }); }); diff --git a/public/app/features/dashboard/specs/viewstate_srv.jest.ts 
b/public/app/features/dashboard/specs/viewstate_srv.jest.ts new file mode 100644 index 000000000000..08166c6f2bd6 --- /dev/null +++ b/public/app/features/dashboard/specs/viewstate_srv.jest.ts @@ -0,0 +1,67 @@ +//import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; +import 'app/features/dashboard/view_state_srv'; +import config from 'app/core/config'; +import { DashboardViewState } from '../view_state_srv'; + +describe('when updating view state', () => { + let location = { + replace: jest.fn(), + search: jest.fn(), + }; + + let $scope = { + onAppEvent: jest.fn(() => {}), + dashboard: { + meta: {}, + panels: [], + }, + }; + + let $rootScope = {}; + let viewState; + + beforeEach(() => { + config.bootData = { + user: { + orgId: 1, + }, + }; + }); + + describe('to fullscreen true and edit true', () => { + beforeEach(() => { + location.search = jest.fn(() => { + return { fullscreen: true, edit: true, panelId: 1 }; + }); + viewState = new DashboardViewState($scope, location, {}, $rootScope); + }); + + it('should update querystring and view state', () => { + var updateState = { fullscreen: true, edit: true, panelId: 1 }; + + viewState.update(updateState); + + expect(location.search).toHaveBeenCalledWith({ + edit: true, + editview: null, + fullscreen: true, + orgId: 1, + panelId: 1, + }); + expect(viewState.dashboard.meta.fullscreen).toBe(true); + expect(viewState.state.fullscreen).toBe(true); + }); + }); + + describe('to fullscreen false', () => { + beforeEach(() => { + viewState = new DashboardViewState($scope, location, {}, $rootScope); + }); + it('should remove params from query string', () => { + viewState.update({ fullscreen: true, panelId: 1, edit: true }); + viewState.update({ fullscreen: false }); + expect(viewState.dashboard.meta.fullscreen).toBe(false); + expect(viewState.state.fullscreen).toBe(null); + }); + }); +}); diff --git a/public/app/features/dashboard/specs/viewstate_srv_specs.ts 
b/public/app/features/dashboard/specs/viewstate_srv_specs.ts deleted file mode 100644 index d34b15b9113e..000000000000 --- a/public/app/features/dashboard/specs/viewstate_srv_specs.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; -import 'app/features/dashboard/view_state_srv'; -import config from 'app/core/config'; - -describe('when updating view state', function() { - var viewState, location; - var timeSrv = {}; - var templateSrv = {}; - var contextSrv = { - user: { - orgId: 19, - }, - }; - beforeEach(function() { - config.bootData = { - user: { - orgId: 1, - }, - }; - }); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($provide) { - $provide.value('timeSrv', timeSrv); - $provide.value('templateSrv', templateSrv); - $provide.value('contextSrv', contextSrv); - }) - ); - - beforeEach( - angularMocks.inject(function(dashboardViewStateSrv, $location, $rootScope) { - $rootScope.onAppEvent = function() {}; - $rootScope.dashboard = { - meta: {}, - panels: [], - }; - viewState = dashboardViewStateSrv.create($rootScope); - location = $location; - }) - ); - - describe('to fullscreen true and edit true', function() { - it('should update querystring and view state', function() { - var updateState = { fullscreen: true, edit: true, panelId: 1 }; - viewState.update(updateState); - expect(location.search()).to.eql({ - fullscreen: true, - edit: true, - panelId: 1, - orgId: 1, - }); - expect(viewState.dashboard.meta.fullscreen).to.be(true); - expect(viewState.state.fullscreen).to.be(true); - }); - }); - - describe('to fullscreen false', function() { - it('should remove params from query string', function() { - viewState.update({ fullscreen: true, panelId: 1, edit: true }); - viewState.update({ fullscreen: false }); - expect(viewState.dashboard.meta.fullscreen).to.be(false); - expect(viewState.state.fullscreen).to.be(null); - }); - }); -}); diff --git 
a/public/app/plugins/panel/graph/series_overrides_ctrl.ts b/public/app/plugins/panel/graph/series_overrides_ctrl.ts index ecf79a8a4fb5..5958c80bac9a 100644 --- a/public/app/plugins/panel/graph/series_overrides_ctrl.ts +++ b/public/app/plugins/panel/graph/series_overrides_ctrl.ts @@ -1,160 +1,158 @@ import _ from 'lodash'; import angular from 'angular'; -export class SeriesOverridesCtrl { - /** @ngInject */ - constructor($scope, $element, popoverSrv) { - $scope.overrideMenu = []; - $scope.currentOverrides = []; - $scope.override = $scope.override || {}; - - $scope.addOverrideOption = function(name, propertyName, values) { - var option = { - text: name, - propertyName: propertyName, - index: $scope.overrideMenu.lenght, - values: values, - submenu: _.map(values, function(value) { - return { text: String(value), value: value }; - }), - }; +/** @ngInject */ +export function SeriesOverridesCtrl($scope, $element, popoverSrv) { + $scope.overrideMenu = []; + $scope.currentOverrides = []; + $scope.override = $scope.override || {}; - $scope.overrideMenu.push(option); + $scope.addOverrideOption = function(name, propertyName, values) { + var option = { + text: name, + propertyName: propertyName, + index: $scope.overrideMenu.lenght, + values: values, + submenu: _.map(values, function(value) { + return { text: String(value), value: value }; + }), }; - $scope.setOverride = function(item, subItem) { - // handle color overrides - if (item.propertyName === 'color') { - $scope.openColorSelector($scope.override['color']); - return; - } + $scope.overrideMenu.push(option); + }; - $scope.override[item.propertyName] = subItem.value; + $scope.setOverride = function(item, subItem) { + // handle color overrides + if (item.propertyName === 'color') { + $scope.openColorSelector($scope.override['color']); + return; + } - // automatically disable lines for this series and the fill below to series - // can be removed by the user if they still want lines - if (item.propertyName === 'fillBelowTo') { 
- $scope.override['lines'] = false; - $scope.ctrl.addSeriesOverride({ alias: subItem.value, lines: false }); - } + $scope.override[item.propertyName] = subItem.value; - $scope.updateCurrentOverrides(); - $scope.ctrl.render(); - }; + // automatically disable lines for this series and the fill below to series + // can be removed by the user if they still want lines + if (item.propertyName === 'fillBelowTo') { + $scope.override['lines'] = false; + $scope.ctrl.addSeriesOverride({ alias: subItem.value, lines: false }); + } - $scope.colorSelected = function(color) { - $scope.override['color'] = color; - $scope.updateCurrentOverrides(); - $scope.ctrl.render(); - }; + $scope.updateCurrentOverrides(); + $scope.ctrl.render(); + }; - $scope.openColorSelector = function(color) { - var fakeSeries = { color: color }; - popoverSrv.show({ - element: $element.find('.dropdown')[0], - position: 'top center', - openOn: 'click', - template: '', - model: { - autoClose: true, - colorSelected: $scope.colorSelected, - series: fakeSeries, - }, - onClose: function() { - $scope.ctrl.render(); - }, - }); - }; + $scope.colorSelected = function(color) { + $scope.override['color'] = color; + $scope.updateCurrentOverrides(); + $scope.ctrl.render(); + }; - $scope.removeOverride = function(option) { - delete $scope.override[option.propertyName]; - $scope.updateCurrentOverrides(); - $scope.ctrl.refresh(); - }; + $scope.openColorSelector = function(color) { + var fakeSeries = { color: color }; + popoverSrv.show({ + element: $element.find('.dropdown')[0], + position: 'top center', + openOn: 'click', + template: '', + model: { + autoClose: true, + colorSelected: $scope.colorSelected, + series: fakeSeries, + }, + onClose: function() { + $scope.ctrl.render(); + }, + }); + }; - $scope.getSeriesNames = function() { - return _.map($scope.ctrl.seriesList, function(series) { - return series.alias; - }); - }; + $scope.removeOverride = function(option) { + delete $scope.override[option.propertyName]; + 
$scope.updateCurrentOverrides(); + $scope.ctrl.refresh(); + }; + + $scope.getSeriesNames = function() { + return _.map($scope.ctrl.seriesList, function(series) { + return series.alias; + }); + }; - $scope.updateCurrentOverrides = function() { - $scope.currentOverrides = []; - _.each($scope.overrideMenu, function(option) { - var value = $scope.override[option.propertyName]; - if (_.isUndefined(value)) { - return; - } - $scope.currentOverrides.push({ - name: option.text, - propertyName: option.propertyName, - value: String(value), - }); + $scope.updateCurrentOverrides = function() { + $scope.currentOverrides = []; + _.each($scope.overrideMenu, function(option) { + var value = $scope.override[option.propertyName]; + if (_.isUndefined(value)) { + return; + } + $scope.currentOverrides.push({ + name: option.text, + propertyName: option.propertyName, + value: String(value), }); - }; + }); + }; - $scope.addOverrideOption('Bars', 'bars', [true, false]); - $scope.addOverrideOption('Lines', 'lines', [true, false]); - $scope.addOverrideOption('Line fill', 'fill', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); - $scope.addOverrideOption('Line width', 'linewidth', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); - $scope.addOverrideOption('Null point mode', 'nullPointMode', ['connected', 'null', 'null as zero']); - $scope.addOverrideOption('Fill below to', 'fillBelowTo', $scope.getSeriesNames()); - $scope.addOverrideOption('Staircase line', 'steppedLine', [true, false]); - $scope.addOverrideOption('Dashes', 'dashes', [true, false]); - $scope.addOverrideOption('Dash Length', 'dashLength', [ - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - ]); - $scope.addOverrideOption('Dash Space', 'spaceLength', [ - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, - 19, - 20, - ]); - $scope.addOverrideOption('Points', 'points', [true, false]); - $scope.addOverrideOption('Points Radius', 'pointradius', [1, 
2, 3, 4, 5]); - $scope.addOverrideOption('Stack', 'stack', [true, false, 'A', 'B', 'C', 'D']); - $scope.addOverrideOption('Color', 'color', ['change']); - $scope.addOverrideOption('Y-axis', 'yaxis', [1, 2]); - $scope.addOverrideOption('Z-index', 'zindex', [-3, -2, -1, 0, 1, 2, 3]); - $scope.addOverrideOption('Transform', 'transform', ['negative-Y']); - $scope.addOverrideOption('Legend', 'legend', [true, false]); - $scope.updateCurrentOverrides(); - } + $scope.addOverrideOption('Bars', 'bars', [true, false]); + $scope.addOverrideOption('Lines', 'lines', [true, false]); + $scope.addOverrideOption('Line fill', 'fill', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); + $scope.addOverrideOption('Line width', 'linewidth', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]); + $scope.addOverrideOption('Null point mode', 'nullPointMode', ['connected', 'null', 'null as zero']); + $scope.addOverrideOption('Fill below to', 'fillBelowTo', $scope.getSeriesNames()); + $scope.addOverrideOption('Staircase line', 'steppedLine', [true, false]); + $scope.addOverrideOption('Dashes', 'dashes', [true, false]); + $scope.addOverrideOption('Dash Length', 'dashLength', [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + ]); + $scope.addOverrideOption('Dash Space', 'spaceLength', [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + ]); + $scope.addOverrideOption('Points', 'points', [true, false]); + $scope.addOverrideOption('Points Radius', 'pointradius', [1, 2, 3, 4, 5]); + $scope.addOverrideOption('Stack', 'stack', [true, false, 'A', 'B', 'C', 'D']); + $scope.addOverrideOption('Color', 'color', ['change']); + $scope.addOverrideOption('Y-axis', 'yaxis', [1, 2]); + $scope.addOverrideOption('Z-index', 'zindex', [-3, -2, -1, 0, 1, 2, 3]); + $scope.addOverrideOption('Transform', 'transform', ['negative-Y']); + $scope.addOverrideOption('Legend', 'legend', [true, false]); + 
$scope.updateCurrentOverrides(); } angular.module('grafana.controllers').controller('SeriesOverridesCtrl', SeriesOverridesCtrl); diff --git a/public/app/plugins/panel/graph/specs/series_override_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/series_override_ctrl.jest.ts new file mode 100644 index 000000000000..2e7456a132a4 --- /dev/null +++ b/public/app/plugins/panel/graph/specs/series_override_ctrl.jest.ts @@ -0,0 +1,42 @@ +import '../series_overrides_ctrl'; +import { SeriesOverridesCtrl } from '../series_overrides_ctrl'; + +describe('SeriesOverridesCtrl', () => { + let popoverSrv = {}; + let $scope; + + beforeEach(() => { + $scope = { + ctrl: { + refresh: jest.fn(), + render: jest.fn(), + seriesList: [], + }, + render: jest.fn(() => {}), + }; + SeriesOverridesCtrl($scope, {}, popoverSrv); + }); + + describe('When setting an override', () => { + beforeEach(() => { + $scope.setOverride({ propertyName: 'lines' }, { value: true }); + }); + + it('should set override property', () => { + expect($scope.override.lines).toBe(true); + }); + + it('should update view model', () => { + expect($scope.currentOverrides[0].name).toBe('Lines'); + expect($scope.currentOverrides[0].value).toBe('true'); + }); + }); + + describe('When removing overide', () => { + it('click should include option and value index', () => { + $scope.setOverride(1, 0); + $scope.removeOverride({ propertyName: 'lines' }); + expect($scope.currentOverrides.length).toBe(0); + }); + }); +}); diff --git a/public/app/plugins/panel/graph/specs/series_override_ctrl_specs.ts b/public/app/plugins/panel/graph/specs/series_override_ctrl_specs.ts deleted file mode 100644 index 9e311c0775ef..000000000000 --- a/public/app/plugins/panel/graph/specs/series_override_ctrl_specs.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { describe, beforeEach, it, expect, sinon, angularMocks } from 'test/lib/common'; -import '../series_overrides_ctrl'; -import helpers from 'test/specs/helpers'; - -describe('SeriesOverridesCtrl', function() 
{ - var ctx = new helpers.ControllerTestContext(); - var popoverSrv = {}; - - beforeEach(angularMocks.module('grafana.services')); - beforeEach(angularMocks.module('grafana.controllers')); - - beforeEach( - ctx.providePhase({ - popoverSrv: popoverSrv, - }) - ); - - beforeEach( - angularMocks.inject(function($rootScope, $controller) { - ctx.scope = $rootScope.$new(); - ctx.scope.ctrl = { - refresh: sinon.spy(), - render: sinon.spy(), - seriesList: [], - }; - ctx.scope.render = function() {}; - ctx.controller = $controller('SeriesOverridesCtrl', { - $scope: ctx.scope, - }); - }) - ); - - describe('When setting an override', function() { - beforeEach(function() { - ctx.scope.setOverride({ propertyName: 'lines' }, { value: true }); - }); - - it('should set override property', function() { - expect(ctx.scope.override.lines).to.be(true); - }); - - it('should update view model', function() { - expect(ctx.scope.currentOverrides[0].name).to.be('Lines'); - expect(ctx.scope.currentOverrides[0].value).to.be('true'); - }); - }); - - describe('When removing overide', function() { - it('click should include option and value index', function() { - ctx.scope.setOverride(1, 0); - ctx.scope.removeOverride({ propertyName: 'lines' }); - expect(ctx.scope.currentOverrides.length).to.be(0); - }); - }); -}); From 54420363d3640570dafdcba24afda5b6fdc04c1d Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 28 Jun 2018 14:02:42 +0200 Subject: [PATCH 019/263] fix footer css issue --- public/sass/components/_footer.scss | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/public/sass/components/_footer.scss b/public/sass/components/_footer.scss index 4a77ec37605b..3c30f5a57988 100644 --- a/public/sass/components/_footer.scss +++ b/public/sass/components/_footer.scss @@ -45,9 +45,11 @@ } @include media-breakpoint-up(md) { - .footer { - bottom: $spacer; - position: absolute; - padding: 5rem 0 1rem 0; + .login-page { + .footer { + bottom: $spacer; + position: 
absolute; + padding: 5rem 0 1rem 0; + } } } From 4c4bd2ebba839b6d043bc69606b9977fef3efe39 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 28 Jun 2018 14:27:41 +0200 Subject: [PATCH 020/263] changelog: add notes about closing #12430 [skip ci] --- CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c943b2b97767..e149b4203538 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,12 @@ * **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248) +# 5.2.1 (unreleased) + +### Minor + +* **UI**: Fix - Grafana footer overlapping page [#12430](https://github.com/grafana/grafana/issues/12430) + # 5.2.0-stable (2018-06-27) ### Minor From 7a2be69abcd07d22d48815e554e02a25bea14a22 Mon Sep 17 00:00:00 2001 From: Daniel Lee Date: Thu, 28 Jun 2018 14:46:40 +0200 Subject: [PATCH 021/263] changelog: adds note for #11892 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e149b4203538..7be0564bc967 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ * **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248) +* **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps) # 5.2.1 (unreleased) From 8af5da738340e55ded1ee5e1a3b14287264d0451 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 28 Jun 2018 15:43:33 +0200 
Subject: [PATCH 022/263] Revert "auth proxy: use real ip when validating white listed ip's" --- pkg/middleware/auth_proxy.go | 20 +++++------ pkg/middleware/middleware_test.go | 55 ------------------------------- 2 files changed, 8 insertions(+), 67 deletions(-) diff --git a/pkg/middleware/auth_proxy.go b/pkg/middleware/auth_proxy.go index eff532b0da2d..144a0ae3a693 100644 --- a/pkg/middleware/auth_proxy.go +++ b/pkg/middleware/auth_proxy.go @@ -2,6 +2,7 @@ package middleware import ( "fmt" + "net" "net/mail" "reflect" "strings" @@ -28,7 +29,7 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool { } // if auth proxy ip(s) defined, check if request comes from one of those - if err := checkAuthenticationProxy(ctx.RemoteAddr(), proxyHeaderValue); err != nil { + if err := checkAuthenticationProxy(ctx.Req.RemoteAddr, proxyHeaderValue); err != nil { ctx.Handle(407, "Proxy authentication required", err) return true } @@ -196,23 +197,18 @@ func checkAuthenticationProxy(remoteAddr string, proxyHeaderValue string) error return nil } - // Multiple ip addresses? 
Right-most IP address is the IP address of the most recent proxy - if strings.Contains(remoteAddr, ",") { - sourceIPs := strings.Split(remoteAddr, ",") - remoteAddr = strings.TrimSpace(sourceIPs[len(sourceIPs)-1]) - } - - remoteAddr = strings.TrimPrefix(remoteAddr, "[") - remoteAddr = strings.TrimSuffix(remoteAddr, "]") - proxies := strings.Split(setting.AuthProxyWhitelist, ",") + sourceIP, _, err := net.SplitHostPort(remoteAddr) + if err != nil { + return err + } // Compare allowed IP addresses to actual address for _, proxyIP := range proxies { - if remoteAddr == strings.TrimSpace(proxyIP) { + if sourceIP == strings.TrimSpace(proxyIP) { return nil } } - return fmt.Errorf("Request for user (%s) from %s is not from the authentication proxy", proxyHeaderValue, remoteAddr) + return fmt.Errorf("Request for user (%s) from %s is not from the authentication proxy", proxyHeaderValue, sourceIP) } diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go index d82b7313585c..87c23a7b49a9 100644 --- a/pkg/middleware/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -315,61 +315,6 @@ func TestMiddlewareContext(t *testing.T) { }) }) - middlewareScenario("When auth_proxy is enabled and request has X-Forwarded-For that is not trusted", func(sc *scenarioContext) { - setting.AuthProxyEnabled = true - setting.AuthProxyHeaderName = "X-WEBAUTH-USER" - setting.AuthProxyHeaderProperty = "username" - setting.AuthProxyWhitelist = "192.168.1.1, 2001::23" - - bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { - query.Result = &m.SignedInUser{OrgId: 4, UserId: 33} - return nil - }) - - bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error { - cmd.Result = &m.User{Id: 33} - return nil - }) - - sc.fakeReq("GET", "/") - sc.req.Header.Add("X-WEBAUTH-USER", "torkelo") - sc.req.Header.Add("X-Forwarded-For", "client-ip, 192.168.1.1, 192.168.1.2") - sc.exec() - - Convey("should return 407 status code", func() { - So(sc.resp.Code, 
ShouldEqual, 407) - So(sc.resp.Body.String(), ShouldContainSubstring, "Request for user (torkelo) from 192.168.1.2 is not from the authentication proxy") - }) - }) - - middlewareScenario("When auth_proxy is enabled and request has X-Forwarded-For that is trusted", func(sc *scenarioContext) { - setting.AuthProxyEnabled = true - setting.AuthProxyHeaderName = "X-WEBAUTH-USER" - setting.AuthProxyHeaderProperty = "username" - setting.AuthProxyWhitelist = "192.168.1.1, 2001::23" - - bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { - query.Result = &m.SignedInUser{OrgId: 4, UserId: 33} - return nil - }) - - bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error { - cmd.Result = &m.User{Id: 33} - return nil - }) - - sc.fakeReq("GET", "/") - sc.req.Header.Add("X-WEBAUTH-USER", "torkelo") - sc.req.Header.Add("X-Forwarded-For", "client-ip, 192.168.1.2, 192.168.1.1") - sc.exec() - - Convey("Should init context with user info", func() { - So(sc.context.IsSignedIn, ShouldBeTrue) - So(sc.context.UserId, ShouldEqual, 33) - So(sc.context.OrgId, ShouldEqual, 4) - }) - }) - middlewareScenario("When session exists for previous user, create a new session", func(sc *scenarioContext) { setting.AuthProxyEnabled = true setting.AuthProxyHeaderName = "X-WEBAUTH-USER" From fb2b2c9f656f9d8f894edea70418b78efd3f22f3 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 28 Jun 2018 16:39:38 +0200 Subject: [PATCH 023/263] changelog: add notes about closing #12444 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7be0564bc967..aaa9dec5c4d4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ ### Minor * **UI**: Fix - Grafana footer overlapping page [#12430](https://github.com/grafana/grafana/issues/12430) +* **Auth Proxy**: Revert of "Whitelist proxy IP address instead of client IP address" introduced in 5.2.0-beta2 [#12444](https://github.com/grafana/grafana/pull/12444) # 5.2.0-stable (2018-06-27) 
From a0e8437f8f70f33490d6c57dce46bf0de441048b Mon Sep 17 00:00:00 2001 From: Austin Winstanley Date: Fri, 29 Jun 2018 03:48:14 +0000 Subject: [PATCH 024/263] Used PostgreSQL TSDB as a model the set up the __timeFilter, __timeFrom, and __timeTo macros for Microsoft SQL and MySQL --- pkg/tsdb/mssql/macros.go | 6 +++--- pkg/tsdb/mssql/macros_test.go | 18 +++++++++--------- pkg/tsdb/mysql/macros.go | 6 +++--- pkg/tsdb/mysql/macros_test.go | 18 +++++++++--------- yarn.lock | 12 ++++++------ 5 files changed, 30 insertions(+), 30 deletions(-) diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index 2621110ee99e..b136bdfb459d 100644 --- a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -83,11 +83,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], time.Unix(m.TimeRange.GetFromAsSecondsEpoch(), 0), time.Unix(m.TimeRange.GetToAsSecondsEpoch(), 0)), nil + return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeFrom": - return fmt.Sprintf("'%s'", time.Unix(m.TimeRange.GetFromAsSecondsEpoch(), 0)), nil + return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("'%s'", time.Unix(m.TimeRange.GetToAsSecondsEpoch(), 0)), nil + return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) diff --git a/pkg/tsdb/mssql/macros_test.go b/pkg/tsdb/mssql/macros_test.go index 46f67e7d1695..67ee1da75616 100644 --- a/pkg/tsdb/mssql/macros_test.go +++ b/pkg/tsdb/mssql/macros_test.go @@ -49,7 +49,7 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "WHERE 
$__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", time.Unix(from.Unix(), 0), time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeGroup function", func() { @@ -96,14 +96,14 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(from.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) }) Convey("interpolate __timeTo function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) }) Convey("interpolate __unixEpochFilter function", func() { @@ -137,21 +137,21 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", time.Unix(from.Unix(), 0), time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeFrom function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(from.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) }) Convey("interpolate __timeTo function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", 
time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) }) Convey("interpolate __unixEpochFilter function", func() { @@ -185,21 +185,21 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", time.Unix(from.Unix(), 0), time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeFrom function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(from.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) }) Convey("interpolate __timeTo function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) }) Convey("interpolate __unixEpochFilter function", func() { diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index 6dfe6ad952f2..1855670e8638 100644 --- a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -78,11 +78,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], time.Unix(m.TimeRange.GetFromAsSecondsEpoch(), 0), time.Unix(m.TimeRange.GetToAsSecondsEpoch(), 0)), nil + return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeFrom": - return fmt.Sprintf("'%s'", 
time.Unix(m.TimeRange.GetFromAsSecondsEpoch(), 0)), nil + return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("'%s'", time.Unix(m.TimeRange.GetToAsSecondsEpoch(), 0)), nil + return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go index 11ac6d8b2f34..6af1d180737c 100644 --- a/pkg/tsdb/mysql/macros_test.go +++ b/pkg/tsdb/mysql/macros_test.go @@ -54,21 +54,21 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", time.Unix(from.Unix(), 0), time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeFrom function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(from.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) }) Convey("interpolate __timeTo function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) }) Convey("interpolate __unixEpochFilter function", func() { @@ -102,21 +102,21 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", time.Unix(from.Unix(), 
0), time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeFrom function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(from.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) }) Convey("interpolate __timeTo function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) }) Convey("interpolate __unixEpochFilter function", func() { @@ -150,21 +150,21 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", time.Unix(from.Unix(), 0), time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeFrom function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(from.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) }) Convey("interpolate __timeTo function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select '%s'", time.Unix(to.Unix(), 0))) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) }) Convey("interpolate __unixEpochFilter function", func() { diff --git a/yarn.lock b/yarn.lock index 
6cc48a7c79d2..6772d7c14a49 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1114,7 +1114,7 @@ babel-plugin-istanbul@^4.1.5, babel-plugin-istanbul@^4.1.6: istanbul-lib-instrument "^1.10.1" test-exclude "^4.2.1" -babel-plugin-jest-hoist@^22.4.4: +babel-plugin-jest-hoist@^22.4.3, babel-plugin-jest-hoist@^22.4.4: version "22.4.4" resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-22.4.4.tgz#b9851906eab34c7bf6f8c895a2b08bea1a844c0b" @@ -6498,7 +6498,7 @@ jest-docblock@^22.4.0, jest-docblock@^22.4.3: dependencies: detect-newline "^2.1.0" -jest-environment-jsdom@^22.4.1: +jest-environment-jsdom@^22.4.1, jest-environment-jsdom@^22.4.3: version "22.4.3" resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-22.4.3.tgz#d67daa4155e33516aecdd35afd82d4abf0fa8a1e" dependencies: @@ -6506,7 +6506,7 @@ jest-environment-jsdom@^22.4.1: jest-util "^22.4.3" jsdom "^11.5.1" -jest-environment-node@^22.4.1: +jest-environment-node@^22.4.1, jest-environment-node@^22.4.3: version "22.4.3" resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-22.4.3.tgz#54c4eaa374c83dd52a9da8759be14ebe1d0b9129" dependencies: @@ -6533,7 +6533,7 @@ jest-haste-map@^22.4.2: micromatch "^2.3.11" sane "^2.0.0" -jest-jasmine2@^22.4.4: +jest-jasmine2@^22.4.3, jest-jasmine2@^22.4.4: version "22.4.4" resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-22.4.4.tgz#c55f92c961a141f693f869f5f081a79a10d24e23" dependencies: @@ -6587,7 +6587,7 @@ jest-resolve-dependencies@^22.1.0: dependencies: jest-regex-util "^22.4.3" -jest-resolve@^22.4.2: +jest-resolve@^22.4.2, jest-resolve@^22.4.3: version "22.4.3" resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-22.4.3.tgz#0ce9d438c8438229aa9b916968ec6b05c1abb4ea" dependencies: @@ -6671,7 +6671,7 @@ jest-validate@^21.1.0: leven "^2.1.0" pretty-format "^21.2.1" -jest-validate@^22.4.4: +jest-validate@^22.4.3, jest-validate@^22.4.4: version "22.4.4" resolved 
"https://registry.yarnpkg.com/jest-validate/-/jest-validate-22.4.4.tgz#1dd0b616ef46c995de61810d85f57119dbbcec4d" dependencies: From 9823da1cc51e3a6e0222cd8c9c36a97b4f978fd3 Mon Sep 17 00:00:00 2001 From: Austin Winstanley Date: Fri, 29 Jun 2018 03:54:43 +0000 Subject: [PATCH 025/263] Reverted yarn.lock to master --- yarn.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/yarn.lock b/yarn.lock index 6772d7c14a49..6cc48a7c79d2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1114,7 +1114,7 @@ babel-plugin-istanbul@^4.1.5, babel-plugin-istanbul@^4.1.6: istanbul-lib-instrument "^1.10.1" test-exclude "^4.2.1" -babel-plugin-jest-hoist@^22.4.3, babel-plugin-jest-hoist@^22.4.4: +babel-plugin-jest-hoist@^22.4.4: version "22.4.4" resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-22.4.4.tgz#b9851906eab34c7bf6f8c895a2b08bea1a844c0b" @@ -6498,7 +6498,7 @@ jest-docblock@^22.4.0, jest-docblock@^22.4.3: dependencies: detect-newline "^2.1.0" -jest-environment-jsdom@^22.4.1, jest-environment-jsdom@^22.4.3: +jest-environment-jsdom@^22.4.1: version "22.4.3" resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-22.4.3.tgz#d67daa4155e33516aecdd35afd82d4abf0fa8a1e" dependencies: @@ -6506,7 +6506,7 @@ jest-environment-jsdom@^22.4.1, jest-environment-jsdom@^22.4.3: jest-util "^22.4.3" jsdom "^11.5.1" -jest-environment-node@^22.4.1, jest-environment-node@^22.4.3: +jest-environment-node@^22.4.1: version "22.4.3" resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-22.4.3.tgz#54c4eaa374c83dd52a9da8759be14ebe1d0b9129" dependencies: @@ -6533,7 +6533,7 @@ jest-haste-map@^22.4.2: micromatch "^2.3.11" sane "^2.0.0" -jest-jasmine2@^22.4.3, jest-jasmine2@^22.4.4: +jest-jasmine2@^22.4.4: version "22.4.4" resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-22.4.4.tgz#c55f92c961a141f693f869f5f081a79a10d24e23" dependencies: @@ -6587,7 +6587,7 @@ 
jest-resolve-dependencies@^22.1.0: dependencies: jest-regex-util "^22.4.3" -jest-resolve@^22.4.2, jest-resolve@^22.4.3: +jest-resolve@^22.4.2: version "22.4.3" resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-22.4.3.tgz#0ce9d438c8438229aa9b916968ec6b05c1abb4ea" dependencies: @@ -6671,7 +6671,7 @@ jest-validate@^21.1.0: leven "^2.1.0" pretty-format "^21.2.1" -jest-validate@^22.4.3, jest-validate@^22.4.4: +jest-validate@^22.4.4: version "22.4.4" resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-22.4.4.tgz#1dd0b616ef46c995de61810d85f57119dbbcec4d" dependencies: From ce88e4b927a4d417c91829e2e0bc227756ebb915 Mon Sep 17 00:00:00 2001 From: bergquist Date: Fri, 29 Jun 2018 09:35:31 +0200 Subject: [PATCH 026/263] fixes typos closes #12453 --- CHANGELOG.md | 2 +- public/app/partials/login.html | 2 +- yarn.lock | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index aaa9dec5c4d4..fcb1cf6d19d5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1336,7 +1336,7 @@ Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated **New features** - [Issue #1623](https://github.com/grafana/grafana/issues/1623). Share Dashboard: Dashboard snapshot sharing (dash and data snapshot), save to local or save to public snapshot dashboard snapshots.raintank.io site - [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embedd a single graph on another web site -- [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between the user is promted with a warning if he really wants to overwrite the other's changes +- [Issue #718](https://github.com/grafana/grafana/issues/718). 
Dashboard: When saving a dashboard and another user has made changes in between the user is prompted with a warning if he really wants to overwrite the other's changes - [Issue #1331](https://github.com/grafana/grafana/issues/1331). Graph & Singlestat: New axis/unit format selector and more units (kbytes, Joule, Watt, eV), and new design for graph axis & grid tab and single stat options tab views - [Issue #1241](https://github.com/grafana/grafana/issues/1242). Timepicker: New option in timepicker (under dashboard settings), to change ``now`` to be for example ``now-1m``, useful when you want to ignore last minute because it contains incomplete data - [Issue #171](https://github.com/grafana/grafana/issues/171). Panel: Different time periods, panels can override dashboard relative time and/or add a time shift diff --git a/public/app/partials/login.html b/public/app/partials/login.html index 8be9e777b9fa..1919759334bc 100644 --- a/public/app/partials/login.html +++ b/public/app/partials/login.html @@ -89,7 +89,7 @@
    Change Password
    Skip - If you skip you will be promted to change password next time you login. + If you skip you will be prompted to change password next time you login. @@ -26,42 +26,80 @@

    Team Details

    Team Members

    - +
    Add member - - + +
    - - - - - - - - - - - - - - -
    UsernameEmail
    {{member.login}}{{member.email}} - - - -
    -
    - - This team has no members yet. - -
    + + + + Username + Email + + + + + + + + {{member.login}} + {{member.email}} + + + + + + + +
    + + This team has no members yet. + +
    + +
    + +
    + +

    Team Group Mapping

    +
    +
    + Add group + +
    +
    + +
    +
    + + + + + + + + + + + + +
    Group
    {{group.groupId}} + + + +
    +
    + + This team has no associated groups yet. + +
    +
    diff --git a/public/app/features/org/team_details_ctrl.ts b/public/app/features/org/team_details_ctrl.ts index 3d193880635f..35fa04f55d3e 100644 --- a/public/app/features/org/team_details_ctrl.ts +++ b/public/app/features/org/team_details_ctrl.ts @@ -1,15 +1,21 @@ import coreModule from 'app/core/core_module'; +import config from 'app/core/config'; export default class TeamDetailsCtrl { team: Team; teamMembers: User[] = []; navModel: any; + teamGroups: TeamGroup[] = []; + newGroupId: string; + enterprise: boolean; /** @ngInject **/ constructor(private $scope, private backendSrv, private $routeParams, navModelSrv) { this.navModel = navModelSrv.getNav('cfg', 'teams', 0); this.userPicked = this.userPicked.bind(this); this.get = this.get.bind(this); + this.newGroupId = ''; + this.enterprise = config.enterprise; this.get(); } @@ -18,9 +24,16 @@ export default class TeamDetailsCtrl { this.backendSrv.get(`/api/teams/${this.$routeParams.id}`).then(result => { this.team = result; }); + this.backendSrv.get(`/api/teams/${this.$routeParams.id}/members`).then(result => { this.teamMembers = result; }); + + if (config.enterprise) { + this.backendSrv.get(`/api/teams/${this.$routeParams.id}/groups`).then(result => { + this.teamGroups = result; + }); + } } } @@ -57,6 +70,20 @@ export default class TeamDetailsCtrl { this.get(); }); } + + addGroup() { + this.backendSrv.post(`/api/teams/${this.$routeParams.id}/groups`, { groupId: this.newGroupId }).then(() => { + this.get(); + }); + } + + removeGroup(group: TeamGroup) { + this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/groups/${group.groupId}`).then(this.get); + } +} + +export interface TeamGroup { + groupId: string; } export interface Team { diff --git a/scripts/build/build_enterprise.sh b/scripts/build/build_enterprise.sh index 02d8c78c8855..cda3952c36a0 100755 --- a/scripts/build/build_enterprise.sh +++ b/scripts/build/build_enterprise.sh @@ -14,9 +14,9 @@ cd /go/src/github.com/grafana/grafana echo "current dir: 
$(pwd)" cd .. -git clone -b ee_build --single-branch git@github.com:grafana/grafana-enterprise.git --depth 10 +git clone -b master --single-branch git@github.com:grafana/grafana-enterprise.git --depth 10 cd grafana-enterprise -git checkout 7fbae9c1be3467c4a39cf6ad85278a6896ceb49f +#git checkout 7fbae9c1be3467c4a39cf6ad85278a6896ceb49f ./build.sh cd ../grafana From aa1b5959da01c2bb283c30ab49c89c230ba300c9 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Mon, 2 Jul 2018 14:56:24 +0900 Subject: [PATCH 046/263] omit extra template refresh (#12454) --- public/app/features/templating/variable_srv.ts | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/public/app/features/templating/variable_srv.ts b/public/app/features/templating/variable_srv.ts index 8a096dd9ad20..8ad3c2845e20 100644 --- a/public/app/features/templating/variable_srv.ts +++ b/public/app/features/templating/variable_srv.ts @@ -38,7 +38,11 @@ export class VariableSrv { }); } - onDashboardRefresh() { + onDashboardRefresh(evt, payload) { + if (payload && payload.fromVariableValueUpdated) { + return Promise.resolve({}); + } + var promises = this.variables.filter(variable => variable.refresh === 2).map(variable => { var previousOptions = variable.options.slice(); @@ -130,7 +134,7 @@ export class VariableSrv { return this.$q.all(promises).then(() => { if (emitChangeEvents) { this.$rootScope.$emit('template-variable-value-updated'); - this.$rootScope.$broadcast('refresh'); + this.$rootScope.$broadcast('refresh', { fromVariableValueUpdated: true }); } }); } From 10e86eda690db3934a21ad95fe9df9b05c32d3fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Sun, 1 Jul 2018 23:35:50 -0700 Subject: [PATCH 047/263] fix: #12461 introduced issues with route registration ordering, adding plugin static routes before plugins package had been initiated (#12474) --- pkg/api/api.go | 12 ------------ pkg/api/app_routes.go | 2 +- pkg/api/http_server.go | 25 ++++++++++++++++++++++--- 3 
files changed, 23 insertions(+), 16 deletions(-) diff --git a/pkg/api/api.go b/pkg/api/api.go index 78c7aaf3f397..829b77353435 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -9,16 +9,7 @@ import ( m "github.com/grafana/grafana/pkg/models" ) -func (hs *HTTPServer) applyRoutes() { - hs.RouteRegister.Register(hs.macaron) - - InitAppPluginRoutes(hs.macaron) - - hs.macaron.NotFound(NotFoundHandler) -} - func (hs *HTTPServer) registerRoutes() { - macaronR := hs.macaron reqSignedIn := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true}) reqGrafanaAdmin := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true}) reqEditorRole := middleware.RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN) @@ -28,9 +19,6 @@ func (hs *HTTPServer) registerRoutes() { quota := middleware.Quota bind := binding.Bind - // automatically set HEAD for every GET - macaronR.SetAutoHead(true) - r := hs.RouteRegister // not logged in views diff --git a/pkg/api/app_routes.go b/pkg/api/app_routes.go index 0b7dcd32ce3b..a2137089fc6d 100644 --- a/pkg/api/app_routes.go +++ b/pkg/api/app_routes.go @@ -18,7 +18,7 @@ import ( var pluginProxyTransport *http.Transport -func InitAppPluginRoutes(r *macaron.Macaron) { +func (hs *HTTPServer) initAppPluginRoutes(r *macaron.Macaron) { pluginProxyTransport = &http.Transport{ TLSClientConfig: &tls.Config{ InsecureSkipVerify: setting.PluginAppsSkipVerifyTLS, diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 627192bb69ba..0de63ce5e08b 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -69,6 +69,7 @@ func (hs *HTTPServer) Run(ctx context.Context) error { var err error hs.context = ctx + hs.applyRoutes() hs.streamManager.Run(ctx) @@ -169,6 +170,26 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron { macaron.Env = setting.Env m := macaron.New() + // automatically set HEAD for every GET + m.SetAutoHead(true) + + return m +} + +func (hs *HTTPServer) applyRoutes() { + // start with middlewares & static routes + 
hs.addMiddlewaresAndStaticRoutes() + // then add view routes & api routes + hs.RouteRegister.Register(hs.macaron) + // then custom app proxy routes + hs.initAppPluginRoutes(hs.macaron) + // lastly not found route + hs.macaron.NotFound(NotFoundHandler) +} + +func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() { + m := hs.macaron + m.Use(middleware.Logger()) if setting.EnableGzip { @@ -180,7 +201,7 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron { for _, route := range plugins.StaticRoutes { pluginRoute := path.Join("/public/plugins/", route.PluginId) hs.log.Debug("Plugins: Adding route", "route", pluginRoute, "dir", route.Directory) - hs.mapStatic(m, route.Directory, "", pluginRoute) + hs.mapStatic(hs.macaron, route.Directory, "", pluginRoute) } hs.mapStatic(m, setting.StaticRootPath, "build", "public/build") @@ -209,8 +230,6 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron { } m.Use(middleware.AddDefaultResponseHeaders()) - - return m } func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) { From 2941dff42887c750c75d3cf2e26fd825de2cce89 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 2 Jul 2018 13:18:23 +0200 Subject: [PATCH 048/263] Karma to Jest: datasource_srv (#12456) * Karma to Jest: datasource_srv * Sort function differs between Karma and Jest * Fix error based on .sort() implementation * Remove Karma test * alerting: only log when screenshot been uploaded * Remove comments * Karma to Jest: datasource_srv * Sort function differs between Karma and Jest * Fix error based on .sort() implementation * Remove Karma test * Remove comments * Remove console.log * Remove console.log * Change sorting and add test for default data source --- public/app/features/plugins/datasource_srv.ts | 25 ++++---- .../plugins/specs/datasource_srv.jest.ts | 59 +++++++++++++++++ .../plugins/specs/datasource_srv_specs.ts | 64 ------------------- 3 files changed, 73 insertions(+), 75 deletions(-) create mode 100644 
public/app/features/plugins/specs/datasource_srv.jest.ts delete mode 100644 public/app/features/plugins/specs/datasource_srv_specs.ts diff --git a/public/app/features/plugins/datasource_srv.ts b/public/app/features/plugins/datasource_srv.ts index 19bdf599ce1e..b5e0316163c8 100644 --- a/public/app/features/plugins/datasource_srv.ts +++ b/public/app/features/plugins/datasource_srv.ts @@ -91,10 +91,20 @@ export class DatasourceSrv { _.each(config.datasources, function(value, key) { if (value.meta && value.meta.metrics) { - metricSources.push({ value: key, name: key, meta: value.meta }); + let metricSource = { value: key, name: key, meta: value.meta, sort: key }; + + //Make sure grafana and mixed are sorted at the bottom + if (value.meta.id === 'grafana') { + metricSource.sort = String.fromCharCode(253); + } else if (value.meta.id === 'mixed') { + metricSource.sort = String.fromCharCode(254); + } + + metricSources.push(metricSource); if (key === config.defaultDatasource) { - metricSources.push({ value: null, name: 'default', meta: value.meta }); + metricSource = { value: null, name: 'default', meta: value.meta, sort: key }; + metricSources.push(metricSource); } } }); @@ -104,17 +114,10 @@ export class DatasourceSrv { } metricSources.sort(function(a, b) { - // these two should always be at the bottom - if (a.meta.id === 'mixed' || a.meta.id === 'grafana') { - return 1; - } - if (b.meta.id === 'mixed' || b.meta.id === 'grafana') { - return -1; - } - if (a.name.toLowerCase() > b.name.toLowerCase()) { + if (a.sort.toLowerCase() > b.sort.toLowerCase()) { return 1; } - if (a.name.toLowerCase() < b.name.toLowerCase()) { + if (a.sort.toLowerCase() < b.sort.toLowerCase()) { return -1; } return 0; diff --git a/public/app/features/plugins/specs/datasource_srv.jest.ts b/public/app/features/plugins/specs/datasource_srv.jest.ts new file mode 100644 index 000000000000..f261c4e22498 --- /dev/null +++ b/public/app/features/plugins/specs/datasource_srv.jest.ts @@ -0,0 +1,59 @@ +import 
config from 'app/core/config'; +import 'app/features/plugins/datasource_srv'; +import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; + +describe('datasource_srv', function() { + let _datasourceSrv = new DatasourceSrv({}, {}, {}, {}); + let metricSources; + + describe('when loading metric sources', () => { + let unsortedDatasources = { + mmm: { + type: 'test-db', + meta: { metrics: { m: 1 } }, + }, + '--Grafana--': { + type: 'grafana', + meta: { builtIn: true, metrics: { m: 1 }, id: 'grafana' }, + }, + '--Mixed--': { + type: 'test-db', + meta: { builtIn: true, metrics: { m: 1 }, id: 'mixed' }, + }, + ZZZ: { + type: 'test-db', + meta: { metrics: { m: 1 } }, + }, + aaa: { + type: 'test-db', + meta: { metrics: { m: 1 } }, + }, + BBB: { + type: 'test-db', + meta: { metrics: { m: 1 } }, + }, + }; + beforeEach(() => { + config.datasources = unsortedDatasources; + metricSources = _datasourceSrv.getMetricSources({ skipVariables: true }); + }); + + it('should return a list of sources sorted case insensitively with builtin sources last', () => { + expect(metricSources[0].name).toBe('aaa'); + expect(metricSources[1].name).toBe('BBB'); + expect(metricSources[2].name).toBe('mmm'); + expect(metricSources[3].name).toBe('ZZZ'); + expect(metricSources[4].name).toBe('--Grafana--'); + expect(metricSources[5].name).toBe('--Mixed--'); + }); + + beforeEach(() => { + config.defaultDatasource = 'BBB'; + }); + + it('should set default data source', () => { + expect(metricSources[2].name).toBe('default'); + expect(metricSources[2].sort).toBe('BBB'); + }); + }); +}); diff --git a/public/app/features/plugins/specs/datasource_srv_specs.ts b/public/app/features/plugins/specs/datasource_srv_specs.ts deleted file mode 100644 index 85a66b59ee78..000000000000 --- a/public/app/features/plugins/specs/datasource_srv_specs.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; -import config from 'app/core/config'; -import 
'app/features/plugins/datasource_srv'; - -describe('datasource_srv', function() { - var _datasourceSrv; - var metricSources; - var templateSrv = {}; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach( - angularMocks.module(function($provide) { - $provide.value('templateSrv', templateSrv); - }) - ); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.inject(function(datasourceSrv) { - _datasourceSrv = datasourceSrv; - }) - ); - - describe('when loading metric sources', function() { - var unsortedDatasources = { - mmm: { - type: 'test-db', - meta: { metrics: { m: 1 } }, - }, - '--Grafana--': { - type: 'grafana', - meta: { builtIn: true, metrics: { m: 1 }, id: 'grafana' }, - }, - '--Mixed--': { - type: 'test-db', - meta: { builtIn: true, metrics: { m: 1 }, id: 'mixed' }, - }, - ZZZ: { - type: 'test-db', - meta: { metrics: { m: 1 } }, - }, - aaa: { - type: 'test-db', - meta: { metrics: { m: 1 } }, - }, - BBB: { - type: 'test-db', - meta: { metrics: { m: 1 } }, - }, - }; - beforeEach(function() { - config.datasources = unsortedDatasources; - metricSources = _datasourceSrv.getMetricSources({ skipVariables: true }); - }); - - it('should return a list of sources sorted case insensitively with builtin sources last', function() { - expect(metricSources[0].name).to.be('aaa'); - expect(metricSources[1].name).to.be('BBB'); - expect(metricSources[2].name).to.be('mmm'); - expect(metricSources[3].name).to.be('ZZZ'); - expect(metricSources[4].name).to.be('--Grafana--'); - expect(metricSources[5].name).to.be('--Mixed--'); - }); - }); -}); From b8724ae0c400a9d5f62d2390bce1a5a601540339 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 2 Jul 2018 04:33:39 -0700 Subject: [PATCH 049/263] refactoring: enterprise build/hooks refactorings (#12478) --- pkg/api/dtos/index.go | 1 + pkg/api/frontendsettings.go | 2 +- pkg/api/index.go | 1 + pkg/cmd/grafana-server/main.go | 5 +- pkg/extensions/main.go | 2 +- 
pkg/metrics/metrics.go | 9 +++ pkg/setting/setting.go | 7 +- public/app/core/config.ts | 19 +++++- public/app/core/services/context_srv.ts | 4 -- .../features/dashboard/specs/exporter.jest.ts | 4 +- .../features/org/partials/team_details.html | 66 +++++++++---------- public/app/features/org/team_details_ctrl.ts | 6 +- public/views/index.template.html | 2 +- 13 files changed, 73 insertions(+), 55 deletions(-) diff --git a/pkg/api/dtos/index.go b/pkg/api/dtos/index.go index 8c7f505277d3..77004899fc30 100644 --- a/pkg/api/dtos/index.go +++ b/pkg/api/dtos/index.go @@ -13,6 +13,7 @@ type IndexViewData struct { Theme string NewGrafanaVersionExists bool NewGrafanaVersion string + AppName string } type PluginCss struct { diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go index 06e6405baaf1..da3c88566c1f 100644 --- a/pkg/api/frontendsettings.go +++ b/pkg/api/frontendsettings.go @@ -132,7 +132,6 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) { } jsonObj := map[string]interface{}{ - "enterprise": setting.Enterprise, "defaultDatasource": defaultDatasource, "datasources": datasources, "panels": panels, @@ -154,6 +153,7 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) { "latestVersion": plugins.GrafanaLatestVersion, "hasUpdate": plugins.GrafanaHasUpdate, "env": setting.Env, + "isEnterprise": setting.IsEnterprise, }, } diff --git a/pkg/api/index.go b/pkg/api/index.go index a52bd3e77b07..ea10940d3ba9 100644 --- a/pkg/api/index.go +++ b/pkg/api/index.go @@ -76,6 +76,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { BuildCommit: setting.BuildCommit, NewGrafanaVersion: plugins.GrafanaLatestVersion, NewGrafanaVersionExists: plugins.GrafanaHasUpdate, + AppName: setting.ApplicationName, } if setting.DisableGravatar { diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index 7b90cf8b1e8f..f00e6bba0fde 100644 --- a/pkg/cmd/grafana-server/main.go +++ 
b/pkg/cmd/grafana-server/main.go @@ -18,7 +18,7 @@ import ( "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/setting" - _ "github.com/grafana/grafana/pkg/extensions" + extensions "github.com/grafana/grafana/pkg/extensions" _ "github.com/grafana/grafana/pkg/services/alerting/conditions" _ "github.com/grafana/grafana/pkg/services/alerting/notifiers" _ "github.com/grafana/grafana/pkg/tsdb/cloudwatch" @@ -35,7 +35,6 @@ import ( var version = "5.0.0" var commit = "NA" var buildstamp string -var enterprise string var configFile = flag.String("config", "", "path to config file") var homePath = flag.String("homepath", "", "path to grafana install/home path, defaults to working directory") @@ -78,7 +77,7 @@ func main() { setting.BuildVersion = version setting.BuildCommit = commit setting.BuildStamp = buildstampInt64 - setting.Enterprise, _ = strconv.ParseBool(enterprise) + setting.IsEnterprise = extensions.IsEnterprise metrics.M_Grafana_Version.WithLabelValues(version).Set(1) diff --git a/pkg/extensions/main.go b/pkg/extensions/main.go index 34ac9da7e86e..6e3461da8a8f 100644 --- a/pkg/extensions/main.go +++ b/pkg/extensions/main.go @@ -1,3 +1,3 @@ package extensions -import _ "github.com/pkg/errors" +var IsEnterprise bool = false diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index 3d3cfc2e1b66..4dd84c121517 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -334,6 +334,14 @@ func updateTotalStats() { var usageStatsURL = "https://stats.grafana.org/grafana-usage-report" +func getEdition() string { + if setting.IsEnterprise { + return "enterprise" + } else { + return "oss" + } +} + func sendUsageStats() { if !setting.ReportingEnabled { return @@ -349,6 +357,7 @@ func sendUsageStats() { "metrics": metrics, "os": runtime.GOOS, "arch": runtime.GOARCH, + "edition": getEdition(), } statsQuery := models.GetSystemStatsQuery{} diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index e71a3619aa57..d8c8e6431c0b 100644 --- 
a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -18,9 +18,10 @@ import ( "github.com/go-macaron/session" + "time" + "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/util" - "time" ) type Scheme string @@ -49,7 +50,7 @@ var ( BuildVersion string BuildCommit string BuildStamp int64 - Enterprise bool + IsEnterprise bool ApplicationName string // Paths @@ -517,7 +518,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { Raw = cfg.Raw ApplicationName = "Grafana" - if Enterprise { + if IsEnterprise { ApplicationName += " Enterprise" } diff --git a/public/app/core/config.ts b/public/app/core/config.ts index 83ef17662c8e..e065ddb22fb4 100644 --- a/public/app/core/config.ts +++ b/public/app/core/config.ts @@ -1,11 +1,18 @@ import _ from 'lodash'; -class Settings { +export interface BuildInfo { + version: string; + commit: string; + isEnterprise: boolean; + env: string; +} + +export class Settings { datasources: any; panels: any; appSubUrl: string; window_title_prefix: string; - buildInfo: any; + buildInfo: BuildInfo; new_panel_title: string; bootData: any; externalUserMngLinkUrl: string; @@ -22,7 +29,6 @@ class Settings { disableUserSignUp: boolean; loginHint: any; loginError: any; - enterprise: boolean; constructor(options) { var defaults = { @@ -33,7 +39,14 @@ class Settings { playlist_timespan: '1m', unsaved_changes_warning: true, appSubUrl: '', + buildInfo: { + version: 'v1.0', + commit: '1', + env: 'production', + isEnterprise: false, + }, }; + _.extend(this, defaults, options); } } diff --git a/public/app/core/services/context_srv.ts b/public/app/core/services/context_srv.ts index be8a0af7b7b7..8959573e731e 100644 --- a/public/app/core/services/context_srv.ts +++ b/public/app/core/services/context_srv.ts @@ -34,14 +34,10 @@ export class ContextSrv { constructor() { this.sidemenu = store.getBool('grafana.sidemenu', true); - if (!config.buildInfo) { - config.buildInfo = {}; - } if (!config.bootData) { config.bootData = { user: {}, 
settings: {} }; } - this.version = config.buildInfo.version; this.user = new User(); this.isSignedIn = this.user.isSignedIn; this.isGrafanaAdmin = this.user.isGrafanaAdmin; diff --git a/public/app/features/dashboard/specs/exporter.jest.ts b/public/app/features/dashboard/specs/exporter.jest.ts index aa574a4b85a1..c7727a4af4db 100644 --- a/public/app/features/dashboard/specs/exporter.jest.ts +++ b/public/app/features/dashboard/specs/exporter.jest.ts @@ -86,9 +86,7 @@ describe('given dashboard with repeated panels', () => { ], }; - config.buildInfo = { - version: '3.0.2', - }; + config.buildInfo.version = '3.0.2'; //Stubs test function calls var datasourceSrvStub = { get: jest.fn(arg => getStub(arg)) }; diff --git a/public/app/features/org/partials/team_details.html b/public/app/features/org/partials/team_details.html index c5ac8bd37a3d..3ce851d55463 100644 --- a/public/app/features/org/partials/team_details.html +++ b/public/app/features/org/partials/team_details.html @@ -67,39 +67,39 @@

    Team Members

    -
    +
    -

    Team Group Mapping

    -
    -
    - Add group - -
    -
    - -
    -
    +

    Mappings to external groups

    +
    +
    + Add group + +
    +
    + +
    +
    - - - - - - - - - - - -
    Group
    {{group.groupId}} - - - -
    -
    - - This team has no associated groups yet. - -
    + + + + + + + + + + + +
    Group
    {{group.groupId}} + + + +
    +
    + + This team has no associated groups yet. + +
    -
    +
    diff --git a/public/app/features/org/team_details_ctrl.ts b/public/app/features/org/team_details_ctrl.ts index 35fa04f55d3e..6e0fddafa9d9 100644 --- a/public/app/features/org/team_details_ctrl.ts +++ b/public/app/features/org/team_details_ctrl.ts @@ -7,7 +7,7 @@ export default class TeamDetailsCtrl { navModel: any; teamGroups: TeamGroup[] = []; newGroupId: string; - enterprise: boolean; + isMappingsEnabled: boolean; /** @ngInject **/ constructor(private $scope, private backendSrv, private $routeParams, navModelSrv) { @@ -15,7 +15,7 @@ export default class TeamDetailsCtrl { this.userPicked = this.userPicked.bind(this); this.get = this.get.bind(this); this.newGroupId = ''; - this.enterprise = config.enterprise; + this.isMappingsEnabled = config.buildInfo.isEnterprise; this.get(); } @@ -29,7 +29,7 @@ export default class TeamDetailsCtrl { this.teamMembers = result; }); - if (config.enterprise) { + if (this.isMappingsEnabled) { this.backendSrv.get(`/api/teams/${this.$routeParams.id}/groups`).then(result => { this.teamGroups = result; }); diff --git a/public/views/index.template.html b/public/views/index.template.html index 79da1d7179ca..4140321d633a 100644 --- a/public/views/index.template.html +++ b/public/views/index.template.html @@ -65,7 +65,7 @@
  • - Grafana + [[.AppName]] v[[.BuildVersion]] (commit: [[.BuildCommit]])
  • [[if .NewGrafanaVersionExists]] From 6286c31d4b217eb3526475bfd7e560c6c4a0199a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 2 Jul 2018 14:10:39 +0200 Subject: [PATCH 050/263] refactoring: making api wrap public --- build.go | 1 - pkg/api/api.go | 272 +++++++++++++++++++++++----------------------- pkg/api/common.go | 2 +- 3 files changed, 137 insertions(+), 138 deletions(-) diff --git a/build.go b/build.go index 3f92f8833a27..77cbde50c412 100644 --- a/build.go +++ b/build.go @@ -465,7 +465,6 @@ func ldflags() string { b.WriteString(fmt.Sprintf(" -X main.version=%s", version)) b.WriteString(fmt.Sprintf(" -X main.commit=%s", getGitSha())) b.WriteString(fmt.Sprintf(" -X main.buildstamp=%d", buildStamp())) - b.WriteString(fmt.Sprintf(" -X main.enterprise=%t", enterprise)) return b.String() } diff --git a/pkg/api/api.go b/pkg/api/api.go index 829b77353435..8870b9b095e7 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -24,7 +24,7 @@ func (hs *HTTPServer) registerRoutes() { // not logged in views r.Get("/", reqSignedIn, Index) r.Get("/logout", Logout) - r.Post("/login", quota("session"), bind(dtos.LoginCommand{}), wrap(LoginPost)) + r.Post("/login", quota("session"), bind(dtos.LoginCommand{}), Wrap(LoginPost)) r.Get("/login/:name", quota("session"), OAuthLogin) r.Get("/login", LoginView) r.Get("/invite/:code", Index) @@ -83,20 +83,20 @@ func (hs *HTTPServer) registerRoutes() { // sign up r.Get("/signup", Index) - r.Get("/api/user/signup/options", wrap(GetSignUpOptions)) - r.Post("/api/user/signup", quota("user"), bind(dtos.SignUpForm{}), wrap(SignUp)) - r.Post("/api/user/signup/step2", bind(dtos.SignUpStep2Form{}), wrap(SignUpStep2)) + r.Get("/api/user/signup/options", Wrap(GetSignUpOptions)) + r.Post("/api/user/signup", quota("user"), bind(dtos.SignUpForm{}), Wrap(SignUp)) + r.Post("/api/user/signup/step2", bind(dtos.SignUpStep2Form{}), Wrap(SignUpStep2)) // invited - r.Get("/api/user/invite/:code", wrap(GetInviteInfoByCode)) - 
r.Post("/api/user/invite/complete", bind(dtos.CompleteInviteForm{}), wrap(CompleteInvite)) + r.Get("/api/user/invite/:code", Wrap(GetInviteInfoByCode)) + r.Post("/api/user/invite/complete", bind(dtos.CompleteInviteForm{}), Wrap(CompleteInvite)) // reset password r.Get("/user/password/send-reset-email", Index) r.Get("/user/password/reset", Index) - r.Post("/api/user/password/send-reset-email", bind(dtos.SendResetPasswordEmailForm{}), wrap(SendResetPasswordEmail)) - r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), wrap(ResetPassword)) + r.Post("/api/user/password/send-reset-email", bind(dtos.SendResetPasswordEmailForm{}), Wrap(SendResetPasswordEmail)) + r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), Wrap(ResetPassword)) // dashboard snapshots r.Get("/dashboard/snapshot/*", Index) @@ -106,8 +106,8 @@ func (hs *HTTPServer) registerRoutes() { r.Post("/api/snapshots/", bind(m.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot) r.Get("/api/snapshot/shared-options/", GetSharingOptions) r.Get("/api/snapshots/:key", GetDashboardSnapshot) - r.Get("/api/snapshots-delete/:deleteKey", wrap(DeleteDashboardSnapshotByDeleteKey)) - r.Delete("/api/snapshots/:key", reqEditorRole, wrap(DeleteDashboardSnapshot)) + r.Get("/api/snapshots-delete/:deleteKey", Wrap(DeleteDashboardSnapshotByDeleteKey)) + r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot)) // api renew session based on remember cookie r.Get("/api/login/ping", quota("session"), LoginAPIPing) @@ -117,138 +117,138 @@ func (hs *HTTPServer) registerRoutes() { // user (signed in) apiRoute.Group("/user", func(userRoute routing.RouteRegister) { - userRoute.Get("/", wrap(GetSignedInUser)) - userRoute.Put("/", bind(m.UpdateUserCommand{}), wrap(UpdateSignedInUser)) - userRoute.Post("/using/:id", wrap(UserSetUsingOrg)) - userRoute.Get("/orgs", wrap(GetSignedInUserOrgList)) + userRoute.Get("/", Wrap(GetSignedInUser)) + userRoute.Put("/", 
bind(m.UpdateUserCommand{}), Wrap(UpdateSignedInUser)) + userRoute.Post("/using/:id", Wrap(UserSetUsingOrg)) + userRoute.Get("/orgs", Wrap(GetSignedInUserOrgList)) - userRoute.Post("/stars/dashboard/:id", wrap(StarDashboard)) - userRoute.Delete("/stars/dashboard/:id", wrap(UnstarDashboard)) + userRoute.Post("/stars/dashboard/:id", Wrap(StarDashboard)) + userRoute.Delete("/stars/dashboard/:id", Wrap(UnstarDashboard)) - userRoute.Put("/password", bind(m.ChangeUserPasswordCommand{}), wrap(ChangeUserPassword)) - userRoute.Get("/quotas", wrap(GetUserQuotas)) - userRoute.Put("/helpflags/:id", wrap(SetHelpFlag)) + userRoute.Put("/password", bind(m.ChangeUserPasswordCommand{}), Wrap(ChangeUserPassword)) + userRoute.Get("/quotas", Wrap(GetUserQuotas)) + userRoute.Put("/helpflags/:id", Wrap(SetHelpFlag)) // For dev purpose - userRoute.Get("/helpflags/clear", wrap(ClearHelpFlags)) + userRoute.Get("/helpflags/clear", Wrap(ClearHelpFlags)) - userRoute.Get("/preferences", wrap(GetUserPreferences)) - userRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateUserPreferences)) + userRoute.Get("/preferences", Wrap(GetUserPreferences)) + userRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), Wrap(UpdateUserPreferences)) }) // users (admin permission required) apiRoute.Group("/users", func(usersRoute routing.RouteRegister) { - usersRoute.Get("/", wrap(SearchUsers)) - usersRoute.Get("/search", wrap(SearchUsersWithPaging)) - usersRoute.Get("/:id", wrap(GetUserByID)) - usersRoute.Get("/:id/orgs", wrap(GetUserOrgList)) + usersRoute.Get("/", Wrap(SearchUsers)) + usersRoute.Get("/search", Wrap(SearchUsersWithPaging)) + usersRoute.Get("/:id", Wrap(GetUserByID)) + usersRoute.Get("/:id/orgs", Wrap(GetUserOrgList)) // query parameters /users/lookup?loginOrEmail=admin@example.com - usersRoute.Get("/lookup", wrap(GetUserByLoginOrEmail)) - usersRoute.Put("/:id", bind(m.UpdateUserCommand{}), wrap(UpdateUser)) - usersRoute.Post("/:id/using/:orgId", wrap(UpdateUserActiveOrg)) + 
usersRoute.Get("/lookup", Wrap(GetUserByLoginOrEmail)) + usersRoute.Put("/:id", bind(m.UpdateUserCommand{}), Wrap(UpdateUser)) + usersRoute.Post("/:id/using/:orgId", Wrap(UpdateUserActiveOrg)) }, reqGrafanaAdmin) // team (admin permission required) apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) { - teamsRoute.Post("/", bind(m.CreateTeamCommand{}), wrap(CreateTeam)) - teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), wrap(UpdateTeam)) - teamsRoute.Delete("/:teamId", wrap(DeleteTeamByID)) - teamsRoute.Get("/:teamId/members", wrap(GetTeamMembers)) - teamsRoute.Post("/:teamId/members", bind(m.AddTeamMemberCommand{}), wrap(AddTeamMember)) - teamsRoute.Delete("/:teamId/members/:userId", wrap(RemoveTeamMember)) + teamsRoute.Post("/", bind(m.CreateTeamCommand{}), Wrap(CreateTeam)) + teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), Wrap(UpdateTeam)) + teamsRoute.Delete("/:teamId", Wrap(DeleteTeamByID)) + teamsRoute.Get("/:teamId/members", Wrap(GetTeamMembers)) + teamsRoute.Post("/:teamId/members", bind(m.AddTeamMemberCommand{}), Wrap(AddTeamMember)) + teamsRoute.Delete("/:teamId/members/:userId", Wrap(RemoveTeamMember)) }, reqOrgAdmin) // team without requirement of user to be org admin apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) { - teamsRoute.Get("/:teamId", wrap(GetTeamByID)) - teamsRoute.Get("/search", wrap(SearchTeams)) + teamsRoute.Get("/:teamId", Wrap(GetTeamByID)) + teamsRoute.Get("/search", Wrap(SearchTeams)) }) // org information available to all users. 
apiRoute.Group("/org", func(orgRoute routing.RouteRegister) { - orgRoute.Get("/", wrap(GetOrgCurrent)) - orgRoute.Get("/quotas", wrap(GetOrgQuotas)) + orgRoute.Get("/", Wrap(GetOrgCurrent)) + orgRoute.Get("/quotas", Wrap(GetOrgQuotas)) }) // current org apiRoute.Group("/org", func(orgRoute routing.RouteRegister) { - orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrgCurrent)) - orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddressCurrent)) - orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), wrap(AddOrgUserToCurrentOrg)) - orgRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUserForCurrentOrg)) - orgRoute.Delete("/users/:userId", wrap(RemoveOrgUserForCurrentOrg)) + orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), Wrap(UpdateOrgCurrent)) + orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), Wrap(UpdateOrgAddressCurrent)) + orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), Wrap(AddOrgUserToCurrentOrg)) + orgRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), Wrap(UpdateOrgUserForCurrentOrg)) + orgRoute.Delete("/users/:userId", Wrap(RemoveOrgUserForCurrentOrg)) // invites - orgRoute.Get("/invites", wrap(GetPendingOrgInvites)) - orgRoute.Post("/invites", quota("user"), bind(dtos.AddInviteForm{}), wrap(AddOrgInvite)) - orgRoute.Patch("/invites/:code/revoke", wrap(RevokeInvite)) + orgRoute.Get("/invites", Wrap(GetPendingOrgInvites)) + orgRoute.Post("/invites", quota("user"), bind(dtos.AddInviteForm{}), Wrap(AddOrgInvite)) + orgRoute.Patch("/invites/:code/revoke", Wrap(RevokeInvite)) // prefs - orgRoute.Get("/preferences", wrap(GetOrgPreferences)) - orgRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateOrgPreferences)) + orgRoute.Get("/preferences", Wrap(GetOrgPreferences)) + orgRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), Wrap(UpdateOrgPreferences)) }, reqOrgAdmin) // current org without requirement of user to be org admin 
apiRoute.Group("/org", func(orgRoute routing.RouteRegister) { - orgRoute.Get("/users", wrap(GetOrgUsersForCurrentOrg)) + orgRoute.Get("/users", Wrap(GetOrgUsersForCurrentOrg)) }) // create new org - apiRoute.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), wrap(CreateOrg)) + apiRoute.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), Wrap(CreateOrg)) // search all orgs - apiRoute.Get("/orgs", reqGrafanaAdmin, wrap(SearchOrgs)) + apiRoute.Get("/orgs", reqGrafanaAdmin, Wrap(SearchOrgs)) // orgs (admin routes) apiRoute.Group("/orgs/:orgId", func(orgsRoute routing.RouteRegister) { - orgsRoute.Get("/", wrap(GetOrgByID)) - orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrg)) - orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddress)) - orgsRoute.Delete("/", wrap(DeleteOrgByID)) - orgsRoute.Get("/users", wrap(GetOrgUsers)) - orgsRoute.Post("/users", bind(m.AddOrgUserCommand{}), wrap(AddOrgUser)) - orgsRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUser)) - orgsRoute.Delete("/users/:userId", wrap(RemoveOrgUser)) - orgsRoute.Get("/quotas", wrap(GetOrgQuotas)) - orgsRoute.Put("/quotas/:target", bind(m.UpdateOrgQuotaCmd{}), wrap(UpdateOrgQuota)) + orgsRoute.Get("/", Wrap(GetOrgByID)) + orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), Wrap(UpdateOrg)) + orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), Wrap(UpdateOrgAddress)) + orgsRoute.Delete("/", Wrap(DeleteOrgByID)) + orgsRoute.Get("/users", Wrap(GetOrgUsers)) + orgsRoute.Post("/users", bind(m.AddOrgUserCommand{}), Wrap(AddOrgUser)) + orgsRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), Wrap(UpdateOrgUser)) + orgsRoute.Delete("/users/:userId", Wrap(RemoveOrgUser)) + orgsRoute.Get("/quotas", Wrap(GetOrgQuotas)) + orgsRoute.Put("/quotas/:target", bind(m.UpdateOrgQuotaCmd{}), Wrap(UpdateOrgQuota)) }, reqGrafanaAdmin) // orgs (admin routes) apiRoute.Group("/orgs/name/:name", func(orgsRoute routing.RouteRegister) { - 
orgsRoute.Get("/", wrap(GetOrgByName)) + orgsRoute.Get("/", Wrap(GetOrgByName)) }, reqGrafanaAdmin) // auth api keys apiRoute.Group("/auth/keys", func(keysRoute routing.RouteRegister) { - keysRoute.Get("/", wrap(GetAPIKeys)) - keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), wrap(AddAPIKey)) - keysRoute.Delete("/:id", wrap(DeleteAPIKey)) + keysRoute.Get("/", Wrap(GetAPIKeys)) + keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), Wrap(AddAPIKey)) + keysRoute.Delete("/:id", Wrap(DeleteAPIKey)) }, reqOrgAdmin) // Preferences apiRoute.Group("/preferences", func(prefRoute routing.RouteRegister) { - prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), wrap(SetHomeDashboard)) + prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), Wrap(SetHomeDashboard)) }) // Data sources apiRoute.Group("/datasources", func(datasourceRoute routing.RouteRegister) { - datasourceRoute.Get("/", wrap(GetDataSources)) - datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), wrap(AddDataSource)) - datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), wrap(UpdateDataSource)) - datasourceRoute.Delete("/:id", wrap(DeleteDataSourceByID)) - datasourceRoute.Delete("/name/:name", wrap(DeleteDataSourceByName)) - datasourceRoute.Get("/:id", wrap(GetDataSourceByID)) - datasourceRoute.Get("/name/:name", wrap(GetDataSourceByName)) + datasourceRoute.Get("/", Wrap(GetDataSources)) + datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), Wrap(AddDataSource)) + datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), Wrap(UpdateDataSource)) + datasourceRoute.Delete("/:id", Wrap(DeleteDataSourceByID)) + datasourceRoute.Delete("/name/:name", Wrap(DeleteDataSourceByName)) + datasourceRoute.Get("/:id", Wrap(GetDataSourceByID)) + datasourceRoute.Get("/name/:name", Wrap(GetDataSourceByName)) }, reqOrgAdmin) - apiRoute.Get("/datasources/id/:name", wrap(GetDataSourceIDByName), reqSignedIn) + 
apiRoute.Get("/datasources/id/:name", Wrap(GetDataSourceIDByName), reqSignedIn) - apiRoute.Get("/plugins", wrap(GetPluginList)) - apiRoute.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingByID)) - apiRoute.Get("/plugins/:pluginId/markdown/:name", wrap(GetPluginMarkdown)) + apiRoute.Get("/plugins", Wrap(GetPluginList)) + apiRoute.Get("/plugins/:pluginId/settings", Wrap(GetPluginSettingByID)) + apiRoute.Get("/plugins/:pluginId/markdown/:name", Wrap(GetPluginMarkdown)) apiRoute.Group("/plugins", func(pluginRoute routing.RouteRegister) { - pluginRoute.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards)) - pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting)) + pluginRoute.Get("/:pluginId/dashboards/", Wrap(GetPluginDashboards)) + pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), Wrap(UpdatePluginSetting)) }, reqOrgAdmin) apiRoute.Get("/frontend/settings/", GetFrontendSettings) @@ -257,106 +257,106 @@ func (hs *HTTPServer) registerRoutes() { // Folders apiRoute.Group("/folders", func(folderRoute routing.RouteRegister) { - folderRoute.Get("/", wrap(GetFolders)) - folderRoute.Get("/id/:id", wrap(GetFolderByID)) - folderRoute.Post("/", bind(m.CreateFolderCommand{}), wrap(CreateFolder)) + folderRoute.Get("/", Wrap(GetFolders)) + folderRoute.Get("/id/:id", Wrap(GetFolderByID)) + folderRoute.Post("/", bind(m.CreateFolderCommand{}), Wrap(CreateFolder)) folderRoute.Group("/:uid", func(folderUidRoute routing.RouteRegister) { - folderUidRoute.Get("/", wrap(GetFolderByUID)) - folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), wrap(UpdateFolder)) - folderUidRoute.Delete("/", wrap(DeleteFolder)) + folderUidRoute.Get("/", Wrap(GetFolderByUID)) + folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), Wrap(UpdateFolder)) + folderUidRoute.Delete("/", Wrap(DeleteFolder)) folderUidRoute.Group("/permissions", func(folderPermissionRoute routing.RouteRegister) { - folderPermissionRoute.Get("/", 
wrap(GetFolderPermissionList)) - folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateFolderPermissions)) + folderPermissionRoute.Get("/", Wrap(GetFolderPermissionList)) + folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), Wrap(UpdateFolderPermissions)) }) }) }) // Dashboard apiRoute.Group("/dashboards", func(dashboardRoute routing.RouteRegister) { - dashboardRoute.Get("/uid/:uid", wrap(GetDashboard)) - dashboardRoute.Delete("/uid/:uid", wrap(DeleteDashboardByUID)) + dashboardRoute.Get("/uid/:uid", Wrap(GetDashboard)) + dashboardRoute.Delete("/uid/:uid", Wrap(DeleteDashboardByUID)) - dashboardRoute.Get("/db/:slug", wrap(GetDashboard)) - dashboardRoute.Delete("/db/:slug", wrap(DeleteDashboard)) + dashboardRoute.Get("/db/:slug", Wrap(GetDashboard)) + dashboardRoute.Delete("/db/:slug", Wrap(DeleteDashboard)) - dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), wrap(CalculateDashboardDiff)) + dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), Wrap(CalculateDashboardDiff)) - dashboardRoute.Post("/db", bind(m.SaveDashboardCommand{}), wrap(PostDashboard)) - dashboardRoute.Get("/home", wrap(GetHomeDashboard)) + dashboardRoute.Post("/db", bind(m.SaveDashboardCommand{}), Wrap(PostDashboard)) + dashboardRoute.Get("/home", Wrap(GetHomeDashboard)) dashboardRoute.Get("/tags", GetDashboardTags) - dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), wrap(ImportDashboard)) + dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), Wrap(ImportDashboard)) dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute routing.RouteRegister) { - dashIdRoute.Get("/versions", wrap(GetDashboardVersions)) - dashIdRoute.Get("/versions/:id", wrap(GetDashboardVersion)) - dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion)) + dashIdRoute.Get("/versions", Wrap(GetDashboardVersions)) + dashIdRoute.Get("/versions/:id", 
Wrap(GetDashboardVersion)) + dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), Wrap(RestoreDashboardVersion)) dashIdRoute.Group("/permissions", func(dashboardPermissionRoute routing.RouteRegister) { - dashboardPermissionRoute.Get("/", wrap(GetDashboardPermissionList)) - dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateDashboardPermissions)) + dashboardPermissionRoute.Get("/", Wrap(GetDashboardPermissionList)) + dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), Wrap(UpdateDashboardPermissions)) }) }) }) // Dashboard snapshots apiRoute.Group("/dashboard/snapshots", func(dashboardRoute routing.RouteRegister) { - dashboardRoute.Get("/", wrap(SearchDashboardSnapshots)) + dashboardRoute.Get("/", Wrap(SearchDashboardSnapshots)) }) // Playlist apiRoute.Group("/playlists", func(playlistRoute routing.RouteRegister) { - playlistRoute.Get("/", wrap(SearchPlaylists)) - playlistRoute.Get("/:id", ValidateOrgPlaylist, wrap(GetPlaylist)) - playlistRoute.Get("/:id/items", ValidateOrgPlaylist, wrap(GetPlaylistItems)) - playlistRoute.Get("/:id/dashboards", ValidateOrgPlaylist, wrap(GetPlaylistDashboards)) - playlistRoute.Delete("/:id", reqEditorRole, ValidateOrgPlaylist, wrap(DeletePlaylist)) - playlistRoute.Put("/:id", reqEditorRole, bind(m.UpdatePlaylistCommand{}), ValidateOrgPlaylist, wrap(UpdatePlaylist)) - playlistRoute.Post("/", reqEditorRole, bind(m.CreatePlaylistCommand{}), wrap(CreatePlaylist)) + playlistRoute.Get("/", Wrap(SearchPlaylists)) + playlistRoute.Get("/:id", ValidateOrgPlaylist, Wrap(GetPlaylist)) + playlistRoute.Get("/:id/items", ValidateOrgPlaylist, Wrap(GetPlaylistItems)) + playlistRoute.Get("/:id/dashboards", ValidateOrgPlaylist, Wrap(GetPlaylistDashboards)) + playlistRoute.Delete("/:id", reqEditorRole, ValidateOrgPlaylist, Wrap(DeletePlaylist)) + playlistRoute.Put("/:id", reqEditorRole, bind(m.UpdatePlaylistCommand{}), ValidateOrgPlaylist, Wrap(UpdatePlaylist)) + 
playlistRoute.Post("/", reqEditorRole, bind(m.CreatePlaylistCommand{}), Wrap(CreatePlaylist)) }) // Search apiRoute.Get("/search/", Search) // metrics - apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), wrap(QueryMetrics)) - apiRoute.Get("/tsdb/testdata/scenarios", wrap(GetTestDataScenarios)) - apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, wrap(GenerateSQLTestData)) - apiRoute.Get("/tsdb/testdata/random-walk", wrap(GetTestDataRandomWalk)) + apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), Wrap(QueryMetrics)) + apiRoute.Get("/tsdb/testdata/scenarios", Wrap(GetTestDataScenarios)) + apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, Wrap(GenerateSQLTestData)) + apiRoute.Get("/tsdb/testdata/random-walk", Wrap(GetTestDataRandomWalk)) apiRoute.Group("/alerts", func(alertsRoute routing.RouteRegister) { - alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest)) - alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), wrap(PauseAlert)) - alertsRoute.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert)) - alertsRoute.Get("/", wrap(GetAlerts)) - alertsRoute.Get("/states-for-dashboard", wrap(GetAlertStatesForDashboard)) + alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), Wrap(AlertTest)) + alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), Wrap(PauseAlert)) + alertsRoute.Get("/:alertId", ValidateOrgAlert, Wrap(GetAlert)) + alertsRoute.Get("/", Wrap(GetAlerts)) + alertsRoute.Get("/states-for-dashboard", Wrap(GetAlertStatesForDashboard)) }) - apiRoute.Get("/alert-notifications", wrap(GetAlertNotifications)) - apiRoute.Get("/alert-notifiers", wrap(GetAlertNotifiers)) + apiRoute.Get("/alert-notifications", Wrap(GetAlertNotifications)) + apiRoute.Get("/alert-notifiers", Wrap(GetAlertNotifiers)) apiRoute.Group("/alert-notifications", func(alertNotifications routing.RouteRegister) { - alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), 
wrap(NotificationTest)) - alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), wrap(CreateAlertNotification)) - alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), wrap(UpdateAlertNotification)) - alertNotifications.Get("/:notificationId", wrap(GetAlertNotificationByID)) - alertNotifications.Delete("/:notificationId", wrap(DeleteAlertNotification)) + alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), Wrap(NotificationTest)) + alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), Wrap(CreateAlertNotification)) + alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), Wrap(UpdateAlertNotification)) + alertNotifications.Get("/:notificationId", Wrap(GetAlertNotificationByID)) + alertNotifications.Delete("/:notificationId", Wrap(DeleteAlertNotification)) }, reqEditorRole) - apiRoute.Get("/annotations", wrap(GetAnnotations)) - apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), wrap(DeleteAnnotations)) + apiRoute.Get("/annotations", Wrap(GetAnnotations)) + apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), Wrap(DeleteAnnotations)) apiRoute.Group("/annotations", func(annotationsRoute routing.RouteRegister) { - annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), wrap(PostAnnotation)) - annotationsRoute.Delete("/:annotationId", wrap(DeleteAnnotationByID)) - annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), wrap(UpdateAnnotation)) - annotationsRoute.Delete("/region/:regionId", wrap(DeleteAnnotationRegion)) - annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), wrap(PostGraphiteAnnotation)) + annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), Wrap(PostAnnotation)) + annotationsRoute.Delete("/:annotationId", Wrap(DeleteAnnotationByID)) + annotationsRoute.Put("/:annotationId", 
bind(dtos.UpdateAnnotationsCmd{}), Wrap(UpdateAnnotation)) + annotationsRoute.Delete("/region/:regionId", Wrap(DeleteAnnotationRegion)) + annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), Wrap(PostGraphiteAnnotation)) }) // error test - r.Get("/metrics/error", wrap(GenerateError)) + r.Get("/metrics/error", Wrap(GenerateError)) }, reqSignedIn) @@ -367,10 +367,10 @@ func (hs *HTTPServer) registerRoutes() { adminRoute.Put("/users/:id/password", bind(dtos.AdminUpdateUserPasswordForm{}), AdminUpdateUserPassword) adminRoute.Put("/users/:id/permissions", bind(dtos.AdminUpdateUserPermissionsForm{}), AdminUpdateUserPermissions) adminRoute.Delete("/users/:id", AdminDeleteUser) - adminRoute.Get("/users/:id/quotas", wrap(GetUserQuotas)) - adminRoute.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), wrap(UpdateUserQuota)) + adminRoute.Get("/users/:id/quotas", Wrap(GetUserQuotas)) + adminRoute.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), Wrap(UpdateUserQuota)) adminRoute.Get("/stats", AdminGetStats) - adminRoute.Post("/pause-all-alerts", bind(dtos.PauseAllAlertsCommand{}), wrap(PauseAllAlerts)) + adminRoute.Post("/pause-all-alerts", bind(dtos.PauseAllAlertsCommand{}), Wrap(PauseAllAlerts)) }, reqGrafanaAdmin) // rendering diff --git a/pkg/api/common.go b/pkg/api/common.go index 97f41ff7c72b..7973c72c8fa8 100644 --- a/pkg/api/common.go +++ b/pkg/api/common.go @@ -30,7 +30,7 @@ type NormalResponse struct { err error } -func wrap(action interface{}) macaron.Handler { +func Wrap(action interface{}) macaron.Handler { return func(c *m.ReqContext) { var res Response From 8d6797c367bbb45228cf7e198808981fa2c0bffe Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Mon, 2 Jul 2018 16:05:23 +0200 Subject: [PATCH 051/263] ci: publishes grafana enterprise to s3. 
--- .circleci/config.yml | 36 +++++++++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9b729ecfa157..7c8fad409054 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -160,7 +160,7 @@ jobs: steps: - checkout - run: - name: build and package grafana + name: build, test and package grafana enterprise command: './scripts/build/build_enterprise.sh' - run: name: sign packages @@ -168,6 +168,26 @@ jobs: - run: name: sha-sum packages command: 'go run build.go sha-dist' + -run: + name: move enterprise packages into their own folder + command: 'mv dist enterprise-dist' + - persist_to_workspace: + root: . + paths: + - enterprise-dist/grafana* + + deploy-enterprise-master: + docker: + - image: circleci/python:2.7-stretch + steps: + - attach_workspace: + at: . + - run: + name: install awscli + command: 'sudo pip install awscli' + - run: + name: deploy to s3 + command: 'aws s3 sync ./enterprise-dist s3://$ENTERPRISE_BUCKET_NAME/master' deploy-master: docker: @@ -247,6 +267,20 @@ workflows: filters: branches: only: master + - deploy-enterprise-master: + requires: + - build-all + - test-backend + - test-frontend + - codespell + - gometalinter + - mysql-integration-test + - postgres-integration-test + - build-enterprise + filters: + branches: + only: master + release: jobs: - build-all: From 4245f0ce119f8f016243844bd58de01c9adc1b51 Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Mon, 2 Jul 2018 16:10:20 +0200 Subject: [PATCH 052/263] ci: typo --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 7c8fad409054..0217c129a623 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -168,7 +168,7 @@ jobs: - run: name: sha-sum packages command: 'go run build.go sha-dist' - -run: + - run: name: move enterprise packages into their own folder command: 'mv dist enterprise-dist' - 
persist_to_workspace: From 5e08bf5130e5f118afb1c43b36e62e116f369ec2 Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Mon, 2 Jul 2018 17:13:59 +0200 Subject: [PATCH 053/263] test: fixed usage of wrap in tests. --- pkg/api/alerting_test.go | 2 +- pkg/api/annotations_test.go | 6 +++--- pkg/api/common_test.go | 4 ++-- pkg/api/dashboard_permission_test.go | 2 +- pkg/api/dashboard_test.go | 4 ++-- pkg/api/folder_permission_test.go | 2 +- pkg/api/folder_test.go | 4 ++-- 7 files changed, 12 insertions(+), 12 deletions(-) diff --git a/pkg/api/alerting_test.go b/pkg/api/alerting_test.go index abfdfb663221..9eba0e0d5b64 100644 --- a/pkg/api/alerting_test.go +++ b/pkg/api/alerting_test.go @@ -135,7 +135,7 @@ func postAlertScenario(desc string, url string, routePattern string, role m.Role defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.UserId = TestUserID sc.context.OrgId = TestOrgID diff --git a/pkg/api/annotations_test.go b/pkg/api/annotations_test.go index e5f63ce022b6..6590eb19ff28 100644 --- a/pkg/api/annotations_test.go +++ b/pkg/api/annotations_test.go @@ -223,7 +223,7 @@ func postAnnotationScenario(desc string, url string, routePattern string, role m defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.UserId = TestUserID sc.context.OrgId = TestOrgID @@ -246,7 +246,7 @@ func putAnnotationScenario(desc string, url string, routePattern string, role m. 
defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.UserId = TestUserID sc.context.OrgId = TestOrgID @@ -269,7 +269,7 @@ func deleteAnnotationsScenario(desc string, url string, routePattern string, rol defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.UserId = TestUserID sc.context.OrgId = TestOrgID diff --git a/pkg/api/common_test.go b/pkg/api/common_test.go index 40c438b607a3..8b66a7a468be 100644 --- a/pkg/api/common_test.go +++ b/pkg/api/common_test.go @@ -23,7 +23,7 @@ func loggedInUserScenarioWithRole(desc string, method string, url string, routeP defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.UserId = TestUserID sc.context.OrgId = TestOrgID @@ -51,7 +51,7 @@ func anonymousUserScenario(desc string, method string, url string, routePattern defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c if sc.handlerFunc != nil { return sc.handlerFunc(sc.context) diff --git a/pkg/api/dashboard_permission_test.go b/pkg/api/dashboard_permission_test.go index 24f0bdca365b..f65c5f1f5fa2 100644 --- a/pkg/api/dashboard_permission_test.go +++ b/pkg/api/dashboard_permission_test.go @@ -194,7 +194,7 @@ func updateDashboardPermissionScenario(desc string, url string, routePattern str sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.OrgId = TestOrgID 
sc.context.UserId = TestUserID diff --git a/pkg/api/dashboard_test.go b/pkg/api/dashboard_test.go index ccde23827871..50a2e314f5ca 100644 --- a/pkg/api/dashboard_test.go +++ b/pkg/api/dashboard_test.go @@ -882,7 +882,7 @@ func postDashboardScenario(desc string, url string, routePattern string, mock *d defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.SignedInUser = &m.SignedInUser{OrgId: cmd.OrgId, UserId: cmd.UserId} @@ -907,7 +907,7 @@ func postDiffScenario(desc string, url string, routePattern string, cmd dtos.Cal defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.SignedInUser = &m.SignedInUser{ OrgId: TestOrgID, diff --git a/pkg/api/folder_permission_test.go b/pkg/api/folder_permission_test.go index f7458af6dce4..64a746ca9376 100644 --- a/pkg/api/folder_permission_test.go +++ b/pkg/api/folder_permission_test.go @@ -226,7 +226,7 @@ func updateFolderPermissionScenario(desc string, url string, routePattern string sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.OrgId = TestOrgID sc.context.UserId = TestUserID diff --git a/pkg/api/folder_test.go b/pkg/api/folder_test.go index 0d9b9495686a..6e24e432535c 100644 --- a/pkg/api/folder_test.go +++ b/pkg/api/folder_test.go @@ -152,7 +152,7 @@ func createFolderScenario(desc string, url string, routePattern string, mock *fa defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.SignedInUser = &m.SignedInUser{OrgId: TestOrgID, UserId: 
TestUserID} @@ -181,7 +181,7 @@ func updateFolderScenario(desc string, url string, routePattern string, mock *fa defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.SignedInUser = &m.SignedInUser{OrgId: TestOrgID, UserId: TestUserID} From 37efa934f16de14630c28c190162105a45040dda Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 2 Jul 2018 17:25:44 +0200 Subject: [PATCH 054/263] ldap: improved ldap test env with more structured users and groups --- .gitignore | 1 + docker/blocks/openldap/entrypoint.sh | 10 +++++++++- docker/blocks/openldap/prepopulate/admin.ldif | 10 ---------- docker/blocks/openldap/prepopulate/adminsgroup.ldif | 5 ----- docker/blocks/openldap/prepopulate/editor.ldif | 10 ---------- docker/blocks/openldap/prepopulate/groups/admins.ldif | 5 +++++ .../blocks/openldap/prepopulate/groups/backend.ldif | 5 +++++ docker/blocks/openldap/prepopulate/groups/editor.ldif | 5 +++++ .../blocks/openldap/prepopulate/groups/frontend.ldif | 5 +++++ docker/blocks/openldap/prepopulate/units/groups.ldif | 3 +++ docker/blocks/openldap/prepopulate/units/users.ldif | 3 +++ .../blocks/openldap/prepopulate/users/ldap-admin.ldif | 11 +++++++++++ .../openldap/prepopulate/users/ldap-editor.ldif | 10 ++++++++++ .../openldap/prepopulate/users/ldap-frontend-1.ldif | 10 ++++++++++ .../{viewer.ldif => users/ldap-viewer.ldif} | 8 ++++---- docker/blocks/openldap/prepopulate/usersgroup.ldif | 5 ----- 16 files changed, 71 insertions(+), 35 deletions(-) delete mode 100644 docker/blocks/openldap/prepopulate/admin.ldif delete mode 100644 docker/blocks/openldap/prepopulate/adminsgroup.ldif delete mode 100644 docker/blocks/openldap/prepopulate/editor.ldif create mode 100644 docker/blocks/openldap/prepopulate/groups/admins.ldif create mode 100644 docker/blocks/openldap/prepopulate/groups/backend.ldif create mode 
100644 docker/blocks/openldap/prepopulate/groups/editor.ldif create mode 100644 docker/blocks/openldap/prepopulate/groups/frontend.ldif create mode 100644 docker/blocks/openldap/prepopulate/units/groups.ldif create mode 100644 docker/blocks/openldap/prepopulate/units/users.ldif create mode 100644 docker/blocks/openldap/prepopulate/users/ldap-admin.ldif create mode 100644 docker/blocks/openldap/prepopulate/users/ldap-editor.ldif create mode 100644 docker/blocks/openldap/prepopulate/users/ldap-frontend-1.ldif rename docker/blocks/openldap/prepopulate/{viewer.ldif => users/ldap-viewer.ldif} (52%) delete mode 100644 docker/blocks/openldap/prepopulate/usersgroup.ldif diff --git a/.gitignore b/.gitignore index 25325b378901..accc24d84cdc 100644 --- a/.gitignore +++ b/.gitignore @@ -43,6 +43,7 @@ fig.yml docker-compose.yml docker-compose.yaml /conf/provisioning/**/custom.yaml +/conf/ldap_dev.toml profile.cov /grafana /local diff --git a/docker/blocks/openldap/entrypoint.sh b/docker/blocks/openldap/entrypoint.sh index d560b78d3885..2f383355907d 100755 --- a/docker/blocks/openldap/entrypoint.sh +++ b/docker/blocks/openldap/entrypoint.sh @@ -80,7 +80,15 @@ EOF done fi - for file in `ls /etc/ldap/prepopulate/*.ldif`; do + for file in `ls /etc/ldap/prepopulate/units/*.ldif`; do + slapadd -F /etc/ldap/slapd.d -l "$file" + done + + for file in `ls /etc/ldap/prepopulate/groups/*.ldif`; do + slapadd -F /etc/ldap/slapd.d -l "$file" + done + + for file in `ls /etc/ldap/prepopulate/users/*.ldif`; do slapadd -F /etc/ldap/slapd.d -l "$file" done diff --git a/docker/blocks/openldap/prepopulate/admin.ldif b/docker/blocks/openldap/prepopulate/admin.ldif deleted file mode 100644 index 3f4406d5810e..000000000000 --- a/docker/blocks/openldap/prepopulate/admin.ldif +++ /dev/null @@ -1,10 +0,0 @@ -dn: cn=ldapadmin,dc=grafana,dc=org -mail: ldapadmin@grafana.com -userPassword: grafana -objectClass: person -objectClass: top -objectClass: inetOrgPerson -objectClass: organizationalPerson -sn: 
ldapadmin -cn: ldapadmin -memberOf: cn=admins,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/adminsgroup.ldif b/docker/blocks/openldap/prepopulate/adminsgroup.ldif deleted file mode 100644 index d8dece4e4587..000000000000 --- a/docker/blocks/openldap/prepopulate/adminsgroup.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: cn=admins,dc=grafana,dc=org -cn: admins -member: cn=ldapadmin,dc=grafana,dc=org -objectClass: groupOfNames -objectClass: top diff --git a/docker/blocks/openldap/prepopulate/editor.ldif b/docker/blocks/openldap/prepopulate/editor.ldif deleted file mode 100644 index eba3adc4352a..000000000000 --- a/docker/blocks/openldap/prepopulate/editor.ldif +++ /dev/null @@ -1,10 +0,0 @@ -dn: cn=ldapeditor,dc=grafana,dc=org -mail: ldapeditor@grafana.com -userPassword: grafana -objectClass: person -objectClass: top -objectClass: inetOrgPerson -objectClass: organizationalPerson -sn: ldapeditor -cn: ldapeditor -memberOf: cn=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/groups/admins.ldif b/docker/blocks/openldap/prepopulate/groups/admins.ldif new file mode 100644 index 000000000000..50d3a0ea4a86 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/groups/admins.ldif @@ -0,0 +1,5 @@ +dn: cn=admins,ou=groups,dc=grafana,dc=org +cn: admins +objectClass: groupOfNames +objectClass: top +member: cn=ldap-admin,ou=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/groups/backend.ldif b/docker/blocks/openldap/prepopulate/groups/backend.ldif new file mode 100644 index 000000000000..09a661adea00 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/groups/backend.ldif @@ -0,0 +1,5 @@ +dn: cn=backend,ou=groups,dc=grafana,dc=org +cn: backend +objectClass: groupOfNames +objectClass: top +member: cn=ldap-editor,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/groups/editor.ldif b/docker/blocks/openldap/prepopulate/groups/editor.ldif new file mode 100644 index 000000000000..331ecc94141e --- /dev/null +++ 
b/docker/blocks/openldap/prepopulate/groups/editor.ldif @@ -0,0 +1,5 @@ +dn: cn=editors,ou=groups,dc=grafana,dc=org +cn: editors +objectClass: groupOfNames +objectClass: top +member: cn=ldap-editor,ou=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/groups/frontend.ldif b/docker/blocks/openldap/prepopulate/groups/frontend.ldif new file mode 100644 index 000000000000..c410b96c7ad5 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/groups/frontend.ldif @@ -0,0 +1,5 @@ +dn: cn=frontend,ou=groups,dc=grafana,dc=org +cn: frontend +objectClass: groupOfNames +objectClass: top +member: cn=ldap-frontend-1,ou=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/units/groups.ldif b/docker/blocks/openldap/prepopulate/units/groups.ldif new file mode 100644 index 000000000000..64e21ad744f2 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/units/groups.ldif @@ -0,0 +1,3 @@ +dn: ou=groups,dc=grafana,dc=org +objectclass: top +objectclass: organizationalUnit diff --git a/docker/blocks/openldap/prepopulate/units/users.ldif b/docker/blocks/openldap/prepopulate/units/users.ldif new file mode 100644 index 000000000000..76fc50dd2997 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/units/users.ldif @@ -0,0 +1,3 @@ +dn: ou=users,dc=grafana,dc=org +objectclass: top +objectclass: organizationalUnit diff --git a/docker/blocks/openldap/prepopulate/users/ldap-admin.ldif b/docker/blocks/openldap/prepopulate/users/ldap-admin.ldif new file mode 100644 index 000000000000..1704a15c3db9 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/users/ldap-admin.ldif @@ -0,0 +1,11 @@ +dn: cn=ldap-admin,ou=users,dc=grafana,dc=org +mail: ldap-admin@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-admin +cn: ldap-admin +memberOf: cn=admins,ou=groups,dc=grafana,dc=org +memberOf: cn=editors,ou=groups,dc=grafana,dc=org diff --git 
a/docker/blocks/openldap/prepopulate/users/ldap-editor.ldif b/docker/blocks/openldap/prepopulate/users/ldap-editor.ldif new file mode 100644 index 000000000000..d0de99f8c16e --- /dev/null +++ b/docker/blocks/openldap/prepopulate/users/ldap-editor.ldif @@ -0,0 +1,10 @@ +dn: cn=ldap-editor,ou=users,dc=grafana,dc=org +mail: ldap-editor@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-editor +cn: ldap-editor +memberOf: cn=editors,ou=groups,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/users/ldap-frontend-1.ldif b/docker/blocks/openldap/prepopulate/users/ldap-frontend-1.ldif new file mode 100644 index 000000000000..f5ebe0b41c4d --- /dev/null +++ b/docker/blocks/openldap/prepopulate/users/ldap-frontend-1.ldif @@ -0,0 +1,10 @@ +dn: cn=ldap-frontend-1,ou=users,dc=grafana,dc=org +mail: ldap-frontend-1@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-frontend-1 +cn: ldap-frontend-1 +memberOf: cn=frontend,ou=groups,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/viewer.ldif b/docker/blocks/openldap/prepopulate/users/ldap-viewer.ldif similarity index 52% rename from docker/blocks/openldap/prepopulate/viewer.ldif rename to docker/blocks/openldap/prepopulate/users/ldap-viewer.ldif index f699a7df57b7..07066355a139 100644 --- a/docker/blocks/openldap/prepopulate/viewer.ldif +++ b/docker/blocks/openldap/prepopulate/users/ldap-viewer.ldif @@ -1,9 +1,9 @@ -dn: cn=ldapviewer,dc=grafana,dc=org -mail: ldapviewer@grafana.com +dn: cn=ldap-viewer,ou=users,dc=grafana,dc=org +mail: ldap-viewer@grafana.com userPassword: grafana objectClass: person objectClass: top objectClass: inetOrgPerson objectClass: organizationalPerson -sn: ldapviewer -cn: ldapviewer +sn: ldap-viewer +cn: ldap-viewer diff --git a/docker/blocks/openldap/prepopulate/usersgroup.ldif 
b/docker/blocks/openldap/prepopulate/usersgroup.ldif deleted file mode 100644 index a1de3a50d383..000000000000 --- a/docker/blocks/openldap/prepopulate/usersgroup.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: cn=users,dc=grafana,dc=org -cn: users -member: cn=ldapeditor,dc=grafana,dc=org -objectClass: groupOfNames -objectClass: top From a89351e8e0c8a594de31065a9410675d9bdd9c8d Mon Sep 17 00:00:00 2001 From: zicklam Date: Mon, 2 Jul 2018 18:01:42 +0200 Subject: [PATCH 055/263] correct example (#12481) foo.bar.com in description, but foo.bar in configuration. Updated description, makes more sense then changing the configuration :) --- docs/sources/installation/behind_proxy.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/sources/installation/behind_proxy.md b/docs/sources/installation/behind_proxy.md index 89711aecb468..6e3884456ac1 100644 --- a/docs/sources/installation/behind_proxy.md +++ b/docs/sources/installation/behind_proxy.md @@ -26,7 +26,7 @@ Otherwise Grafana will not behave correctly. See example below. ## Examples Here are some example configurations for running Grafana behind a reverse proxy. -### Grafana configuration (ex http://foo.bar.com) +### Grafana configuration (ex http://foo.bar) ```bash [server] @@ -47,7 +47,7 @@ server { } ``` -### Examples with **sub path** (ex http://foo.bar.com/grafana) +### Examples with **sub path** (ex http://foo.bar/grafana) #### Grafana configuration with sub path ```bash From 0d1f7c8782f3cc7ad9cdd20c5257fdc087362f61 Mon Sep 17 00:00:00 2001 From: David Date: Mon, 2 Jul 2018 20:04:36 +0200 Subject: [PATCH 056/263] Fix bar width issue in aligned prometheus queries (#12483) * Fix bar width issue in aligned prometheus queries This was broken because null values were filled in with unaligned times. * use aligned times for result transformation * add tests An earlier version of this fix aligned the times again in the transformer, but I think it's safe to only deal with aligned times in the response. 
* Fixed prometheus heatmap tranformer test The interval needs to be 1 to prevent step alignment. --- public/app/core/specs/time_series.jest.ts | 14 ++++ .../datasource/prometheus/datasource.ts | 4 +- .../prometheus/specs/datasource.jest.ts | 2 +- .../specs/result_transformer.jest.ts | 78 +++++++++++++++++++ 4 files changed, 95 insertions(+), 3 deletions(-) diff --git a/public/app/core/specs/time_series.jest.ts b/public/app/core/specs/time_series.jest.ts index f52454762183..bf50d807e030 100644 --- a/public/app/core/specs/time_series.jest.ts +++ b/public/app/core/specs/time_series.jest.ts @@ -119,6 +119,20 @@ describe('TimeSeries', function() { series.getFlotPairs('null'); expect(series.stats.avg).toBe(null); }); + + it('calculates timeStep', function() { + series = new TimeSeries({ + datapoints: [[null, 1], [null, 2], [null, 3]], + }); + series.getFlotPairs('null'); + expect(series.stats.timeStep).toBe(1); + + series = new TimeSeries({ + datapoints: [[0, 1530529290], [0, 1530529305], [0, 1530529320]], + }); + series.getFlotPairs('null'); + expect(series.stats.timeStep).toBe(15); + }); }); describe('When checking if ms resolution is needed', function() { diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts index 46431a08ab1e..d7d33264c99c 100644 --- a/public/app/plugins/datasource/prometheus/datasource.ts +++ b/public/app/plugins/datasource/prometheus/datasource.ts @@ -162,8 +162,8 @@ export class PrometheusDatasource { format: activeTargets[index].format, step: queries[index].step, legendFormat: activeTargets[index].legendFormat, - start: start, - end: end, + start: queries[index].start, + end: queries[index].end, query: queries[index].expr, responseListLength: responseList.length, responseIndex: index, diff --git a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts index 0157322da580..219b990e5dd3 100644 --- 
a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts @@ -68,7 +68,7 @@ describe('PrometheusDatasource', () => { ctx.query = { range: { from: moment(1443454528000), to: moment(1443454528000) }, targets: [{ expr: 'test{job="testjob"}', format: 'heatmap', legendFormat: '{{le}}' }], - interval: '60s', + interval: '1s', }; }); diff --git a/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts b/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts index 56a05d5aedb4..b94cca790597 100644 --- a/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts @@ -127,4 +127,82 @@ describe('Prometheus Result Transformer', () => { ]); }); }); + + describe('When resultFormat is time series', () => { + it('should transform matrix into timeseries', () => { + const response = { + status: 'success', + data: { + resultType: 'matrix', + result: [ + { + metric: { __name__: 'test', job: 'testjob' }, + values: [[0, '10'], [1, '10'], [2, '0']], + }, + ], + }, + }; + let result = []; + let options = { + format: 'timeseries', + start: 0, + end: 2, + }; + + ctx.resultTransformer.transform(result, { data: response }, options); + expect(result).toEqual([{ target: 'test{job="testjob"}', datapoints: [[10, 0], [10, 1000], [0, 2000]] }]); + }); + + it('should fill timeseries with null values', () => { + const response = { + status: 'success', + data: { + resultType: 'matrix', + result: [ + { + metric: { __name__: 'test', job: 'testjob' }, + values: [[1, '10'], [2, '0']], + }, + ], + }, + }; + let result = []; + let options = { + format: 'timeseries', + step: 1, + start: 0, + end: 2, + }; + + ctx.resultTransformer.transform(result, { data: response }, options); + expect(result).toEqual([{ target: 'test{job="testjob"}', datapoints: [[null, 0], [10, 1000], [0, 2000]] }]); + }); + + 
it('should align null values with step', () => { + const response = { + status: 'success', + data: { + resultType: 'matrix', + result: [ + { + metric: { __name__: 'test', job: 'testjob' }, + values: [[4, '10'], [8, '10']], + }, + ], + }, + }; + let result = []; + let options = { + format: 'timeseries', + step: 2, + start: 0, + end: 8, + }; + + ctx.resultTransformer.transform(result, { data: response }, options); + expect(result).toEqual([ + { target: 'test{job="testjob"}', datapoints: [[null, 0], [null, 2000], [10, 4000], [null, 6000], [10, 8000]] }, + ]); + }); + }); }); From 6046c8b4ca528bd4b2bb914f6d398914b6b29299 Mon Sep 17 00:00:00 2001 From: Martin Packman Date: Mon, 2 Jul 2018 20:14:41 +0200 Subject: [PATCH 057/263] Make table sorting stable when null values exist (#12362) Currently if a null appears in a table column, for instance in data returned by postgres, sorting on that gives an arbitrary order. This is due to null being neither greater or less than any string, which makes the sort unstable. Change the table sort function to compare on nullness first. Note this is a slight behaviour change for numbers, which would otherwise treat null and 0 as equivalent. 
Signed-off-by: Martin Packman --- public/app/core/specs/table_model.jest.ts | 35 +++++++++++++++++++++++ public/app/core/table_model.ts | 17 ++++------- 2 files changed, 40 insertions(+), 12 deletions(-) diff --git a/public/app/core/specs/table_model.jest.ts b/public/app/core/specs/table_model.jest.ts index a2c1eb5e1aff..3d4c526cfead 100644 --- a/public/app/core/specs/table_model.jest.ts +++ b/public/app/core/specs/table_model.jest.ts @@ -44,3 +44,38 @@ describe('when sorting table asc', () => { expect(table.rows[2][1]).toBe(15); }); }); + +describe('when sorting with nulls', () => { + var table; + var values; + + beforeEach(() => { + table = new TableModel(); + table.columns = [{}, {}]; + table.rows = [[42, ''], [19, 'a'], [null, 'b'], [0, 'd'], [null, null], [2, 'c'], [0, null], [-8, '']]; + }); + + it('numbers with nulls at end with asc sort', () => { + table.sort({ col: 0, desc: false }); + values = table.rows.map(row => row[0]); + expect(values).toEqual([-8, 0, 0, 2, 19, 42, null, null]); + }); + + it('numbers with nulls at start with desc sort', () => { + table.sort({ col: 0, desc: true }); + values = table.rows.map(row => row[0]); + expect(values).toEqual([null, null, 42, 19, 2, 0, 0, -8]); + }); + + it('strings with nulls at end with asc sort', () => { + table.sort({ col: 1, desc: false }); + values = table.rows.map(row => row[1]); + expect(values).toEqual(['', '', 'a', 'b', 'c', 'd', null, null]); + }); + + it('strings with nulls at start with desc sort', () => { + table.sort({ col: 1, desc: true }); + values = table.rows.map(row => row[1]); + expect(values).toEqual([null, null, 'd', 'c', 'b', 'a', '', '']); + }); +}); diff --git a/public/app/core/table_model.ts b/public/app/core/table_model.ts index 5716aac2be61..04857eb806d0 100644 --- a/public/app/core/table_model.ts +++ b/public/app/core/table_model.ts @@ -19,23 +19,16 @@ export default class TableModel { this.rows.sort(function(a, b) { a = a[options.col]; b = b[options.col]; - if (a < b) { - return 
-1; - } - if (a > b) { - return 1; - } - return 0; + // Sort null or undefined seperately from comparable values + return +(a == null) - +(b == null) || +(a > b) || -(a < b); }); - this.columns[options.col].sort = true; - if (options.desc) { this.rows.reverse(); - this.columns[options.col].desc = true; - } else { - this.columns[options.col].desc = false; } + + this.columns[options.col].sort = true; + this.columns[options.col].desc = options.desc; } addColumn(col) { From 1586a42a719f0c0724843e54ed9c4a015d8d00dc Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Tue, 3 Jul 2018 09:25:15 +0200 Subject: [PATCH 058/263] ci: Only publish grafana enterprise packages tagged with enterprise. --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0217c129a623..9b2f436adeec 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -174,7 +174,7 @@ jobs: - persist_to_workspace: root: . paths: - - enterprise-dist/grafana* + - enterprise-dist/grafana-enterprise* deploy-enterprise-master: docker: From 1f97df46c136939e31b6eb3a72661190817a2fce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Tue, 3 Jul 2018 12:57:48 +0200 Subject: [PATCH 059/263] devenv: open ldap docker block now prepopulating data with correct member groups --- docker/blocks/openldap/Dockerfile | 4 +- docker/blocks/openldap/entrypoint.sh | 17 ++-- docker/blocks/openldap/notes.md | 26 +++++- docker/blocks/openldap/prepopulate.sh | 14 ++++ .../blocks/openldap/prepopulate/1_units.ldif | 9 +++ .../blocks/openldap/prepopulate/2_users.ldif | 80 +++++++++++++++++++ .../blocks/openldap/prepopulate/3_groups.ldif | 25 ++++++ .../openldap/prepopulate/groups/admins.ldif | 5 -- .../openldap/prepopulate/groups/backend.ldif | 5 -- .../openldap/prepopulate/groups/editor.ldif | 5 -- .../openldap/prepopulate/groups/frontend.ldif | 5 -- .../openldap/prepopulate/units/groups.ldif | 3 - 
.../openldap/prepopulate/units/users.ldif | 3 - .../prepopulate/users/ldap-admin.ldif | 11 --- .../prepopulate/users/ldap-editor.ldif | 10 --- .../prepopulate/users/ldap-frontend-1.ldif | 10 --- .../prepopulate/users/ldap-viewer.ldif | 9 --- pkg/login/ext_user.go | 2 + 18 files changed, 163 insertions(+), 80 deletions(-) create mode 100755 docker/blocks/openldap/prepopulate.sh create mode 100644 docker/blocks/openldap/prepopulate/1_units.ldif create mode 100644 docker/blocks/openldap/prepopulate/2_users.ldif create mode 100644 docker/blocks/openldap/prepopulate/3_groups.ldif delete mode 100644 docker/blocks/openldap/prepopulate/groups/admins.ldif delete mode 100644 docker/blocks/openldap/prepopulate/groups/backend.ldif delete mode 100644 docker/blocks/openldap/prepopulate/groups/editor.ldif delete mode 100644 docker/blocks/openldap/prepopulate/groups/frontend.ldif delete mode 100644 docker/blocks/openldap/prepopulate/units/groups.ldif delete mode 100644 docker/blocks/openldap/prepopulate/units/users.ldif delete mode 100644 docker/blocks/openldap/prepopulate/users/ldap-admin.ldif delete mode 100644 docker/blocks/openldap/prepopulate/users/ldap-editor.ldif delete mode 100644 docker/blocks/openldap/prepopulate/users/ldap-frontend-1.ldif delete mode 100644 docker/blocks/openldap/prepopulate/users/ldap-viewer.ldif diff --git a/docker/blocks/openldap/Dockerfile b/docker/blocks/openldap/Dockerfile index c9b928ad56ae..76172e133a45 100644 --- a/docker/blocks/openldap/Dockerfile +++ b/docker/blocks/openldap/Dockerfile @@ -8,7 +8,8 @@ ENV OPENLDAP_VERSION 2.4.40 RUN apt-get update && \ DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \ - slapd=${OPENLDAP_VERSION}* && \ + slapd=${OPENLDAP_VERSION}* \ + ldap-utils && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* @@ -22,6 +23,7 @@ COPY modules/ /etc/ldap.dist/modules COPY prepopulate/ /etc/ldap.dist/prepopulate COPY entrypoint.sh /entrypoint.sh +COPY prepopulate.sh /prepopulate.sh ENTRYPOINT 
["/entrypoint.sh"] diff --git a/docker/blocks/openldap/entrypoint.sh b/docker/blocks/openldap/entrypoint.sh index 2f383355907d..d202ed14b31f 100755 --- a/docker/blocks/openldap/entrypoint.sh +++ b/docker/blocks/openldap/entrypoint.sh @@ -76,21 +76,14 @@ EOF IFS=","; declare -a modules=($SLAPD_ADDITIONAL_MODULES); unset IFS for module in "${modules[@]}"; do - slapadd -n0 -F /etc/ldap/slapd.d -l "/etc/ldap/modules/${module}.ldif" >/dev/null 2>&1 + echo "Adding module ${module}" + slapadd -n0 -F /etc/ldap/slapd.d -l "/etc/ldap/modules/${module}.ldif" >/dev/null 2>&1 done fi - for file in `ls /etc/ldap/prepopulate/units/*.ldif`; do - slapadd -F /etc/ldap/slapd.d -l "$file" - done - - for file in `ls /etc/ldap/prepopulate/groups/*.ldif`; do - slapadd -F /etc/ldap/slapd.d -l "$file" - done - - for file in `ls /etc/ldap/prepopulate/users/*.ldif`; do - slapadd -F /etc/ldap/slapd.d -l "$file" - done + # This needs to run in background + # Will prepopulate entries after ldap daemon has started + ./prepopulate.sh & chown -R openldap:openldap /etc/ldap/slapd.d/ /var/lib/ldap/ /var/run/slapd/ else diff --git a/docker/blocks/openldap/notes.md b/docker/blocks/openldap/notes.md index 483266f0d886..8de23d5ccf27 100644 --- a/docker/blocks/openldap/notes.md +++ b/docker/blocks/openldap/notes.md @@ -1,6 +1,6 @@ # Notes on OpenLdap Docker Block -Any ldif files added to the prepopulate subdirectory will be automatically imported into the OpenLdap database. +Any ldif files added to the prepopulate subdirectory will be automatically imported into the OpenLdap database. The ldif files add three users, `ldapviewer`, `ldapeditor` and `ldapadmin`. Two groups, `admins` and `users`, are added that correspond with the group mappings in the default conf/ldap.toml. `ldapadmin` is a member of `admins` and `ldapeditor` is a member of `users`. 
@@ -22,3 +22,27 @@ enabled = true config_file = conf/ldap.toml ; allow_sign_up = true ``` + +Test groups & users + +admins + ldap-admin + ldap-torkel + ldap-daniel +backend + ldap-carl + ldap-torkel + ldap-leo +frontend + ldap-torkel + ldap-tobias + ldap-daniel +editors + ldap-editors + + +no groups + ldap-viewer + + + diff --git a/docker/blocks/openldap/prepopulate.sh b/docker/blocks/openldap/prepopulate.sh new file mode 100755 index 000000000000..aa11f8aba4f7 --- /dev/null +++ b/docker/blocks/openldap/prepopulate.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +echo "Pre-populating ldap entries, first waiting for ldap to start" + +sleep 3 + +adminUserDn="cn=admin,dc=grafana,dc=org" +adminPassword="grafana" + +for file in `ls /etc/ldap/prepopulate/*.ldif`; do + ldapadd -x -D $adminUserDn -w $adminPassword -f "$file" +done + + diff --git a/docker/blocks/openldap/prepopulate/1_units.ldif b/docker/blocks/openldap/prepopulate/1_units.ldif new file mode 100644 index 000000000000..22e063036889 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/1_units.ldif @@ -0,0 +1,9 @@ +dn: ou=groups,dc=grafana,dc=org +ou: Groups +objectclass: top +objectclass: organizationalUnit + +dn: ou=users,dc=grafana,dc=org +ou: Users +objectclass: top +objectclass: organizationalUnit diff --git a/docker/blocks/openldap/prepopulate/2_users.ldif b/docker/blocks/openldap/prepopulate/2_users.ldif new file mode 100644 index 000000000000..52e74b1e4b10 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/2_users.ldif @@ -0,0 +1,80 @@ +# ldap-admin +dn: cn=ldap-admin,ou=users,dc=grafana,dc=org +mail: ldap-admin@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-admin +cn: ldap-admin + +dn: cn=ldap-editor,ou=users,dc=grafana,dc=org +mail: ldap-editor@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-editor +cn: 
ldap-editor + +dn: cn=ldap-viewer,ou=users,dc=grafana,dc=org +mail: ldap-viewer@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-viewer +cn: ldap-viewer + +dn: cn=ldap-carl,ou=users,dc=grafana,dc=org +mail: ldap-carl@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-carl +cn: ldap-carl + +dn: cn=ldap-daniel,ou=users,dc=grafana,dc=org +mail: ldap-daniel@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-daniel +cn: ldap-daniel + +dn: cn=ldap-leo,ou=users,dc=grafana,dc=org +mail: ldap-leo@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-leo +cn: ldap-leo + +dn: cn=ldap-tobias,ou=users,dc=grafana,dc=org +mail: ldap-tobias@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-tobias +cn: ldap-tobias + +dn: cn=ldap-torkel,ou=users,dc=grafana,dc=org +mail: ldap-torkel@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-torkel +cn: ldap-torkel diff --git a/docker/blocks/openldap/prepopulate/3_groups.ldif b/docker/blocks/openldap/prepopulate/3_groups.ldif new file mode 100644 index 000000000000..8638a089cc8e --- /dev/null +++ b/docker/blocks/openldap/prepopulate/3_groups.ldif @@ -0,0 +1,25 @@ +dn: cn=admins,ou=groups,dc=grafana,dc=org +cn: admins +objectClass: groupOfNames +objectClass: top +member: cn=ldap-admin,ou=users,dc=grafana,dc=org +member: cn=ldap-torkel,ou=users,dc=grafana,dc=org + +dn: cn=editors,ou=groups,dc=grafana,dc=org +cn: editors +objectClass: groupOfNames +member: 
cn=ldap-editor,ou=users,dc=grafana,dc=org + +dn: cn=backend,ou=groups,dc=grafana,dc=org +cn: backend +objectClass: groupOfNames +member: cn=ldap-carl,ou=users,dc=grafana,dc=org +member: cn=ldap-leo,ou=users,dc=grafana,dc=org +member: cn=ldap-torkel,ou=users,dc=grafana,dc=org + +dn: cn=frontend,ou=groups,dc=grafana,dc=org +cn: frontend +objectClass: groupOfNames +member: cn=ldap-torkel,ou=users,dc=grafana,dc=org +member: cn=ldap-daniel,ou=users,dc=grafana,dc=org +member: cn=ldap-leo,ou=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/groups/admins.ldif b/docker/blocks/openldap/prepopulate/groups/admins.ldif deleted file mode 100644 index 50d3a0ea4a86..000000000000 --- a/docker/blocks/openldap/prepopulate/groups/admins.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: cn=admins,ou=groups,dc=grafana,dc=org -cn: admins -objectClass: groupOfNames -objectClass: top -member: cn=ldap-admin,ou=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/groups/backend.ldif b/docker/blocks/openldap/prepopulate/groups/backend.ldif deleted file mode 100644 index 09a661adea00..000000000000 --- a/docker/blocks/openldap/prepopulate/groups/backend.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: cn=backend,ou=groups,dc=grafana,dc=org -cn: backend -objectClass: groupOfNames -objectClass: top -member: cn=ldap-editor,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/groups/editor.ldif b/docker/blocks/openldap/prepopulate/groups/editor.ldif deleted file mode 100644 index 331ecc94141e..000000000000 --- a/docker/blocks/openldap/prepopulate/groups/editor.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: cn=editors,ou=groups,dc=grafana,dc=org -cn: editors -objectClass: groupOfNames -objectClass: top -member: cn=ldap-editor,ou=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/groups/frontend.ldif b/docker/blocks/openldap/prepopulate/groups/frontend.ldif deleted file mode 100644 index c410b96c7ad5..000000000000 --- 
a/docker/blocks/openldap/prepopulate/groups/frontend.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: cn=frontend,ou=groups,dc=grafana,dc=org -cn: frontend -objectClass: groupOfNames -objectClass: top -member: cn=ldap-frontend-1,ou=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/units/groups.ldif b/docker/blocks/openldap/prepopulate/units/groups.ldif deleted file mode 100644 index 64e21ad744f2..000000000000 --- a/docker/blocks/openldap/prepopulate/units/groups.ldif +++ /dev/null @@ -1,3 +0,0 @@ -dn: ou=groups,dc=grafana,dc=org -objectclass: top -objectclass: organizationalUnit diff --git a/docker/blocks/openldap/prepopulate/units/users.ldif b/docker/blocks/openldap/prepopulate/units/users.ldif deleted file mode 100644 index 76fc50dd2997..000000000000 --- a/docker/blocks/openldap/prepopulate/units/users.ldif +++ /dev/null @@ -1,3 +0,0 @@ -dn: ou=users,dc=grafana,dc=org -objectclass: top -objectclass: organizationalUnit diff --git a/docker/blocks/openldap/prepopulate/users/ldap-admin.ldif b/docker/blocks/openldap/prepopulate/users/ldap-admin.ldif deleted file mode 100644 index 1704a15c3db9..000000000000 --- a/docker/blocks/openldap/prepopulate/users/ldap-admin.ldif +++ /dev/null @@ -1,11 +0,0 @@ -dn: cn=ldap-admin,ou=users,dc=grafana,dc=org -mail: ldap-admin@grafana.com -userPassword: grafana -objectClass: person -objectClass: top -objectClass: inetOrgPerson -objectClass: organizationalPerson -sn: ldap-admin -cn: ldap-admin -memberOf: cn=admins,ou=groups,dc=grafana,dc=org -memberOf: cn=editors,ou=groups,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/users/ldap-editor.ldif b/docker/blocks/openldap/prepopulate/users/ldap-editor.ldif deleted file mode 100644 index d0de99f8c16e..000000000000 --- a/docker/blocks/openldap/prepopulate/users/ldap-editor.ldif +++ /dev/null @@ -1,10 +0,0 @@ -dn: cn=ldap-editor,ou=users,dc=grafana,dc=org -mail: ldap-editor@grafana.com -userPassword: grafana -objectClass: person -objectClass: top -objectClass: 
inetOrgPerson -objectClass: organizationalPerson -sn: ldap-editor -cn: ldap-editor -memberOf: cn=editors,ou=groups,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/users/ldap-frontend-1.ldif b/docker/blocks/openldap/prepopulate/users/ldap-frontend-1.ldif deleted file mode 100644 index f5ebe0b41c4d..000000000000 --- a/docker/blocks/openldap/prepopulate/users/ldap-frontend-1.ldif +++ /dev/null @@ -1,10 +0,0 @@ -dn: cn=ldap-frontend-1,ou=users,dc=grafana,dc=org -mail: ldap-frontend-1@grafana.com -userPassword: grafana -objectClass: person -objectClass: top -objectClass: inetOrgPerson -objectClass: organizationalPerson -sn: ldap-frontend-1 -cn: ldap-frontend-1 -memberOf: cn=frontend,ou=groups,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/users/ldap-viewer.ldif b/docker/blocks/openldap/prepopulate/users/ldap-viewer.ldif deleted file mode 100644 index 07066355a139..000000000000 --- a/docker/blocks/openldap/prepopulate/users/ldap-viewer.ldif +++ /dev/null @@ -1,9 +0,0 @@ -dn: cn=ldap-viewer,ou=users,dc=grafana,dc=org -mail: ldap-viewer@grafana.com -userPassword: grafana -objectClass: person -objectClass: top -objectClass: inetOrgPerson -objectClass: organizationalPerson -sn: ldap-viewer -cn: ldap-viewer diff --git a/pkg/login/ext_user.go b/pkg/login/ext_user.go index d2f1aa1ff52f..d6eaf9a975e0 100644 --- a/pkg/login/ext_user.go +++ b/pkg/login/ext_user.go @@ -21,6 +21,7 @@ func UpsertUser(cmd *m.UpsertUserCommand) error { Email: extUser.Email, Login: extUser.Login, } + err := bus.Dispatch(userQuery) if err != m.ErrUserNotFound && err != nil { return err @@ -90,6 +91,7 @@ func createUser(extUser *m.ExternalUserInfo) (*m.User, error) { Name: extUser.Name, SkipOrgSetup: len(extUser.OrgRoles) > 0, } + if err := bus.Dispatch(cmd); err != nil { return nil, err } From 27aa1cd568c8ffdb748fcfdd311fa836f93bc9f2 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 3 Jul 2018 19:41:34 +0200 Subject: [PATCH 060/263] changelog: add notes 
about closing #12362 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b3ba90767743..3dd520f8d76a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ * **Api**: Delete nonexistent datasource should return 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248) * **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps) +* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) # 5.2.1 (2018-06-29) From 60792d57ddb256b266a7c89f37b390ba4c6fceeb Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 3 Jul 2018 19:44:21 +0200 Subject: [PATCH 061/263] changelog: add notes about closing #12379 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3dd520f8d76a..d9150128289b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ * **Dashboard**: Fix selecting current dashboard from search should not reload dashboard [#12248](https://github.com/grafana/grafana/issues/12248) * **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps) * **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) +* **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) # 5.2.1 (2018-06-29) From 512e7f8567e0ce07df5882b26c3daf697bb458e0 Mon Sep 17 
00:00:00 2001 From: Marcus Efraimsson Date: Tue, 3 Jul 2018 19:50:21 +0200 Subject: [PATCH 062/263] changelog: add notes about closing #8186 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d9150128289b..717d9fcaa5f7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ * **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps) * **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) +* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) # 5.2.1 (2018-06-29) From faf2ab4249ee8859c52fb83db48268c30e05bac9 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 3 Jul 2018 19:54:09 +0200 Subject: [PATCH 063/263] changelog: add notes about closing #12460 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 717d9fcaa5f7..a7a4f6ca3e75 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ * **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) +* **Postgres/MySQL/MSSQL**: Use floor rounding in 
$__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) # 5.2.1 (2018-06-29) From 634c77469d975fe4ef0536b2fc043de9d21342b5 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 3 Jul 2018 19:58:19 +0200 Subject: [PATCH 064/263] changelog: add notes about closing #11818 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a7a4f6ca3e75..879dc23eaed0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) +- **Github OAuth** Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) # 5.2.1 (2018-06-29) From 723a894fc5d077b2a11d8685d777e3812bdf77bb Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 3 Jul 2018 19:59:34 +0200 Subject: [PATCH 065/263] changelog: update [skip ci] --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 879dc23eaed0..6ddc9497dbc0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,7 +12,7 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels 
[#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) -- **Github OAuth** Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) +* **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) # 5.2.1 (2018-06-29) From d2f31a716f44ce2da7836d2d359a5de8aa8dbb4a Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 4 Jul 2018 12:16:39 +0200 Subject: [PATCH 066/263] remove unnecessary conversions --- pkg/services/alerting/extractor_test.go | 8 ++++---- .../client/search_request_test.go | 18 +++++++++--------- pkg/tsdb/mssql/mssql_test.go | 8 ++++---- pkg/tsdb/mysql/mysql_test.go | 10 +++++----- pkg/tsdb/postgres/postgres_test.go | 8 ++++---- 5 files changed, 26 insertions(+), 26 deletions(-) diff --git a/pkg/services/alerting/extractor_test.go b/pkg/services/alerting/extractor_test.go index 861e9b9cbfc3..c7212e48174b 100644 --- a/pkg/services/alerting/extractor_test.go +++ b/pkg/services/alerting/extractor_test.go @@ -50,7 +50,7 @@ func TestAlertRuleExtraction(t *testing.T) { So(err, ShouldBeNil) Convey("Extractor should not modify the original json", func() { - dashJson, err := simplejson.NewJson([]byte(json)) + dashJson, err := simplejson.NewJson(json) So(err, ShouldBeNil) dash := m.NewDashboardFromJson(dashJson) @@ -79,7 +79,7 @@ func TestAlertRuleExtraction(t *testing.T) { Convey("Parsing and validating dashboard containing graphite alerts", func() { - dashJson, err := simplejson.NewJson([]byte(json)) + dashJson, err := simplejson.NewJson(json) So(err, ShouldBeNil) dash := 
m.NewDashboardFromJson(dashJson) @@ -143,7 +143,7 @@ func TestAlertRuleExtraction(t *testing.T) { panelWithoutId, err := ioutil.ReadFile("./test-data/panels-missing-id.json") So(err, ShouldBeNil) - dashJson, err := simplejson.NewJson([]byte(panelWithoutId)) + dashJson, err := simplejson.NewJson(panelWithoutId) So(err, ShouldBeNil) dash := m.NewDashboardFromJson(dashJson) extractor := NewDashAlertExtractor(dash, 1) @@ -159,7 +159,7 @@ func TestAlertRuleExtraction(t *testing.T) { panelWithIdZero, err := ioutil.ReadFile("./test-data/panel-with-id-0.json") So(err, ShouldBeNil) - dashJson, err := simplejson.NewJson([]byte(panelWithIdZero)) + dashJson, err := simplejson.NewJson(panelWithIdZero) So(err, ShouldBeNil) dash := m.NewDashboardFromJson(dashJson) extractor := NewDashAlertExtractor(dash, 1) diff --git a/pkg/tsdb/elasticsearch/client/search_request_test.go b/pkg/tsdb/elasticsearch/client/search_request_test.go index b026578d64f5..862b8058cbab 100644 --- a/pkg/tsdb/elasticsearch/client/search_request_test.go +++ b/pkg/tsdb/elasticsearch/client/search_request_test.go @@ -32,7 +32,7 @@ func TestSearchRequest(t *testing.T) { Convey("When marshal to JSON should generate correct json", func() { body, err := json.Marshal(sr) So(err, ShouldBeNil) - json, err := simplejson.NewJson([]byte(body)) + json, err := simplejson.NewJson(body) So(err, ShouldBeNil) So(json.Get("size").MustInt(500), ShouldEqual, 0) So(json.Get("sort").Interface(), ShouldBeNil) @@ -81,7 +81,7 @@ func TestSearchRequest(t *testing.T) { Convey("When marshal to JSON should generate correct json", func() { body, err := json.Marshal(sr) So(err, ShouldBeNil) - json, err := simplejson.NewJson([]byte(body)) + json, err := simplejson.NewJson(body) So(err, ShouldBeNil) So(json.Get("size").MustInt(0), ShouldEqual, 200) @@ -124,7 +124,7 @@ func TestSearchRequest(t *testing.T) { Convey("When marshal to JSON should generate correct json", func() { body, err := json.Marshal(sr) So(err, ShouldBeNil) - json, err := 
simplejson.NewJson([]byte(body)) + json, err := simplejson.NewJson(body) So(err, ShouldBeNil) scriptFields, err := json.Get("script_fields").Map() @@ -163,7 +163,7 @@ func TestSearchRequest(t *testing.T) { Convey("When marshal to JSON should generate correct json", func() { body, err := json.Marshal(sr) So(err, ShouldBeNil) - json, err := simplejson.NewJson([]byte(body)) + json, err := simplejson.NewJson(body) So(err, ShouldBeNil) So(json.Get("aggs").MustMap(), ShouldHaveLength, 2) @@ -200,7 +200,7 @@ func TestSearchRequest(t *testing.T) { Convey("When marshal to JSON should generate correct json", func() { body, err := json.Marshal(sr) So(err, ShouldBeNil) - json, err := simplejson.NewJson([]byte(body)) + json, err := simplejson.NewJson(body) So(err, ShouldBeNil) So(json.Get("aggs").MustMap(), ShouldHaveLength, 1) @@ -251,7 +251,7 @@ func TestSearchRequest(t *testing.T) { Convey("When marshal to JSON should generate correct json", func() { body, err := json.Marshal(sr) So(err, ShouldBeNil) - json, err := simplejson.NewJson([]byte(body)) + json, err := simplejson.NewJson(body) So(err, ShouldBeNil) topAggOne := json.GetPath("aggs", "1") @@ -300,7 +300,7 @@ func TestSearchRequest(t *testing.T) { Convey("When marshal to JSON should generate correct json", func() { body, err := json.Marshal(sr) So(err, ShouldBeNil) - json, err := simplejson.NewJson([]byte(body)) + json, err := simplejson.NewJson(body) So(err, ShouldBeNil) topAgg := json.GetPath("aggs", "1") @@ -364,7 +364,7 @@ func TestSearchRequest(t *testing.T) { Convey("When marshal to JSON should generate correct json", func() { body, err := json.Marshal(sr) So(err, ShouldBeNil) - json, err := simplejson.NewJson([]byte(body)) + json, err := simplejson.NewJson(body) So(err, ShouldBeNil) termsAgg := json.GetPath("aggs", "1") @@ -419,7 +419,7 @@ func TestSearchRequest(t *testing.T) { Convey("When marshal to JSON should generate correct json", func() { body, err := json.Marshal(sr) So(err, ShouldBeNil) - json, err := 
simplejson.NewJson([]byte(body)) + json, err := simplejson.NewJson(body) So(err, ShouldBeNil) scriptFields, err := json.Get("script_fields").Map() diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go index 2ecd3cd9e965..db04d6d1f023 100644 --- a/pkg/tsdb/mssql/mssql_test.go +++ b/pkg/tsdb/mssql/mssql_test.go @@ -531,7 +531,7 @@ func TestMSSQL(t *testing.T) { So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 1) - So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3) }) Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() { @@ -553,7 +553,7 @@ func TestMSSQL(t *testing.T) { So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 1) - So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3) }) Convey("When doing a metric query grouping by time and select metric column should return correct series", func() { @@ -930,7 +930,7 @@ func TestMSSQL(t *testing.T) { columns := queryResult.Tables[0].Rows[0] //Should be in milliseconds - So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000)) + So(columns[0].(int64), ShouldEqual, dt.Unix()*1000) }) Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() { @@ -960,7 +960,7 @@ func TestMSSQL(t *testing.T) { columns := queryResult.Tables[0].Rows[0] //Should be in milliseconds - So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000)) + So(columns[0].(int64), ShouldEqual, dt.Unix()*1000) }) Convey("When doing an annotation query with a time column in epoch millisecond format should return 
ms", func() { diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go index 22e98ac63cae..850a37617e28 100644 --- a/pkg/tsdb/mysql/mysql_test.go +++ b/pkg/tsdb/mysql/mysql_test.go @@ -132,8 +132,8 @@ func TestMySQL(t *testing.T) { So(column[7].(float64), ShouldEqual, 1.11) So(column[8].(float64), ShouldEqual, 2.22) So(*column[9].(*float32), ShouldEqual, 3.33) - So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now()) - So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now()) + So(column[10].(time.Time), ShouldHappenWithin, 10*time.Second, time.Now()) + So(column[11].(time.Time), ShouldHappenWithin, 10*time.Second, time.Now()) So(column[12].(string), ShouldEqual, "11:11:11") So(column[13].(int64), ShouldEqual, 2018) So(*column[14].(*[]byte), ShouldHaveSameTypeAs, []byte{1}) @@ -578,7 +578,7 @@ func TestMySQL(t *testing.T) { So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 1) - So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3) }) Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() { @@ -600,7 +600,7 @@ func TestMySQL(t *testing.T) { So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 1) - So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3) }) Convey("When doing a metric query grouping by time and select metric column should return correct series", func() { @@ -817,7 +817,7 @@ func TestMySQL(t *testing.T) { columns := queryResult.Tables[0].Rows[0] //Should be in milliseconds - So(columns[0].(int64), 
ShouldEqual, int64(dt.Unix()*1000)) + So(columns[0].(int64), ShouldEqual, dt.Unix()*1000) }) Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() { diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go index 18ceecb10808..a3a6d6546df5 100644 --- a/pkg/tsdb/postgres/postgres_test.go +++ b/pkg/tsdb/postgres/postgres_test.go @@ -512,7 +512,7 @@ func TestPostgres(t *testing.T) { So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 1) - So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3) }) Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() { @@ -534,7 +534,7 @@ func TestPostgres(t *testing.T) { So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 1) - So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3) }) Convey("When doing a metric query grouping by time and select metric column should return correct series", func() { @@ -721,7 +721,7 @@ func TestPostgres(t *testing.T) { columns := queryResult.Tables[0].Rows[0] //Should be in milliseconds - So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000)) + So(columns[0].(int64), ShouldEqual, dt.Unix()*1000) }) Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() { @@ -751,7 +751,7 @@ func TestPostgres(t *testing.T) { columns := queryResult.Tables[0].Rows[0] //Should be in milliseconds - So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000)) + So(columns[0].(int64), ShouldEqual, dt.Unix()*1000) }) 
Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() { From 01fc6c5d958f98fe952fb7b40f88ced8cf5a28e1 Mon Sep 17 00:00:00 2001 From: Augustin Husson Date: Wed, 4 Jul 2018 12:23:10 +0200 Subject: [PATCH 067/263] fix json indentation --- docs/sources/http_api/admin.md | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/docs/sources/http_api/admin.md b/docs/sources/http_api/admin.md index 0194c69caac1..16b9115e8b9c 100644 --- a/docs/sources/http_api/admin.md +++ b/docs/sources/http_api/admin.md @@ -36,11 +36,10 @@ HTTP/1.1 200 Content-Type: application/json { -"DEFAULT": -{ - "app_mode":"production"}, - "analytics": - { + "DEFAULT": { + "app_mode":"production" + }, + "analytics": { "google_analytics_ua_id":"", "reporting_enabled":"false" }, @@ -340,4 +339,4 @@ HTTP/1.1 200 Content-Type: application/json {state: "new state", message: "alerts pause/un paused", "alertsAffected": 100} -``` \ No newline at end of file +``` From 991a4b16637ea8fed191b08649d9f044987ab768 Mon Sep 17 00:00:00 2001 From: Augustin Husson Date: Wed, 4 Jul 2018 12:26:30 +0200 Subject: [PATCH 068/263] update stats admin doc --- docs/sources/http_api/admin.md | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/docs/sources/http_api/admin.md b/docs/sources/http_api/admin.md index 16b9115e8b9c..2e03611b1252 100644 --- a/docs/sources/http_api/admin.md +++ b/docs/sources/http_api/admin.md @@ -194,15 +194,16 @@ HTTP/1.1 200 Content-Type: application/json { - "user_count":2, - "org_count":1, - "dashboard_count":4, - "db_snapshot_count":2, - "db_tag_count":6, - "data_source_count":1, - "playlist_count":1, - "starred_db_count":2, - "grafana_admin_count":2 + "users":2, + "orgs":1, + "dashboards":4, + "snapshots":2, + "tags":6, + "datasources":1, + "playlists":1, + "stars":2, + "alerts":2, + "activeUsers":1 } ``` From 86a574cc5031f78156879d1cac9315a71bb123a0 Mon Sep 17 00:00:00 2001 From: 
Augustin Husson Date: Wed, 4 Jul 2018 12:30:23 +0200 Subject: [PATCH 069/263] refix the settings indentation --- docs/sources/http_api/admin.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sources/http_api/admin.md b/docs/sources/http_api/admin.md index 2e03611b1252..2d4be21bb785 100644 --- a/docs/sources/http_api/admin.md +++ b/docs/sources/http_api/admin.md @@ -37,7 +37,7 @@ Content-Type: application/json { "DEFAULT": { - "app_mode":"production" + "app_mode":"production" }, "analytics": { "google_analytics_ua_id":"", From a8970a4de95eed9cdb28267fee884207b4649703 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 4 Jul 2018 13:09:42 +0200 Subject: [PATCH 070/263] run enterprise build only on master for now --- .circleci/config.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9b2f436adeec..f351040fe2fb 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -8,6 +8,9 @@ aliases: - &filter-not-release tags: ignore: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/ + - &filter-only-master + branches: + only: master version: 2 @@ -242,7 +245,7 @@ workflows: - build-all: filters: *filter-not-release - build-enterprise: - filters: *filter-not-release + filters: *filter-only-master - codespell: filters: *filter-not-release - gometalinter: @@ -277,9 +280,7 @@ workflows: - mysql-integration-test - postgres-integration-test - build-enterprise - filters: - branches: - only: master + filters: *filter-only-master release: jobs: From bd417bedb238650b10a7bd62e9abc2e74c0d1a90 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 3 Jul 2018 10:12:07 +0200 Subject: [PATCH 071/263] Start elastic ds test conversion --- ...datasource_specs.ts => datasource.jest.ts} | 120 ++++++++++-------- 1 file changed, 67 insertions(+), 53 deletions(-) rename public/app/plugins/datasource/elasticsearch/specs/{datasource_specs.ts => datasource.jest.ts} (75%) diff --git 
a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts similarity index 75% rename from public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts rename to public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts index 558bccf3d0fa..9a3d57f1d4f2 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts @@ -2,31 +2,45 @@ import _ from 'lodash'; import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; import moment from 'moment'; import angular from 'angular'; -import helpers from 'test/specs/helpers'; import { ElasticDatasource } from '../datasource'; +import { TimeSrv } from 'app/features/dashboard/time_srv'; +import $q from 'q'; describe('ElasticDatasource', function() { - var ctx = new helpers.ServiceTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach(ctx.providePhase(['templateSrv', 'backendSrv', 'timeSrv'])); - - beforeEach( - angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) { - ctx.$q = $q; - ctx.$httpBackend = $httpBackend; - ctx.$rootScope = $rootScope; - ctx.$injector = $injector; - $httpBackend.when('GET', /\.html$/).respond(''); - }) - ); + //var ctx = new helpers.ServiceTestContext(); + let backendSrv = { + datasourceRequest: jest.fn() + }; + + let $rootScope = { + $on: jest.fn(), + appEvent: jest.fn(), + }; + + let timeSrv = new TimeSrv($rootScope,jest.fn(),{},{},{}); + + let ctx = { + $rootScope, + backendSrv, + $q + }; + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach(ctx.providePhase(['templateSrv', 'backendSrv', 'timeSrv'])); + + // beforeEach( + // angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) { + // ctx.$q = $q; 
+ // ctx.$httpBackend = $httpBackend; + // ctx.$rootScope = $rootScope; + // ctx.$injector = $injector; + // $httpBackend.when('GET', /\.html$/).respond(''); + // }) + // ); function createDatasource(instanceSettings) { instanceSettings.jsonData = instanceSettings.jsonData || {}; - ctx.ds = ctx.$injector.instantiate(ElasticDatasource, { - instanceSettings: instanceSettings, - }); + ctx.ds = new ElasticDatasource(instanceSettings, {}, backendSrv, { replace: jest.fn() }, timeSrv); } describe('When testing datasource with index pattern', function() { @@ -40,13 +54,13 @@ describe('ElasticDatasource', function() { it('should translate index pattern to current day', function() { var requestOptions; - ctx.backendSrv.datasourceRequest = function(options) { + ctx.backendSrv.datasourceRequest = jest.fn((options) => { requestOptions = options; - return ctx.$q.when({ data: {} }); - }; + return Promise.resolve({ data: {} }); + }); ctx.ds.testDatasource(); - ctx.$rootScope.$apply(); + // ctx.$rootScope.$apply(); var today = moment.utc().format('YYYY.MM.DD'); expect(requestOptions.url).to.be('http://es.com/asd-' + today + '/_mapping'); @@ -63,10 +77,10 @@ describe('ElasticDatasource', function() { jsonData: { interval: 'Daily', esVersion: '2' }, }); - ctx.backendSrv.datasourceRequest = function(options) { + ctx.backendSrv.datasourceRequest = jest.fn((options) => { requestOptions = options; - return ctx.$q.when({ data: { responses: [] } }); - }; + return Promise.resolve({ data: { responses: [] } }); + }); ctx.ds.query({ range: { @@ -82,19 +96,19 @@ describe('ElasticDatasource', function() { ], }); - ctx.$rootScope.$apply(); + // ctx.$rootScope.$apply(); parts = requestOptions.data.split('\n'); header = angular.fromJson(parts[0]); }); it('should translate index pattern to current day', function() { - expect(header.index).to.eql(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']); + expect(header.index).toEqual(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']); }); 
it('should json escape lucene query', function() { var body = angular.fromJson(parts[1]); - expect(body.query.bool.filter[1].query_string.query).to.be('escape\\:test'); + expect(body.query.bool.filter[1].query_string.query).toBe('escape\\:test'); }); }); @@ -108,10 +122,10 @@ describe('ElasticDatasource', function() { jsonData: { esVersion: '2' }, }); - ctx.backendSrv.datasourceRequest = function(options) { + ctx.backendSrv.datasourceRequest = jest.fn((options) => { requestOptions = options; - return ctx.$q.when({ data: { responses: [] } }); - }; + return Promise.resolve({ data: { responses: [] } }); + }); ctx.ds.query({ range: { @@ -127,18 +141,18 @@ describe('ElasticDatasource', function() { ], }); - ctx.$rootScope.$apply(); + // ctx.$rootScope.$apply(); parts = requestOptions.data.split('\n'); header = angular.fromJson(parts[0]); }); it('should set search type to query_then_fetch', function() { - expect(header.search_type).to.eql('query_then_fetch'); + expect(header.search_type).toEqual('query_then_fetch'); }); it('should set size', function() { var body = angular.fromJson(parts[1]); - expect(body.size).to.be(500); + expect(body.size).toBe(500); }); }); @@ -146,8 +160,8 @@ describe('ElasticDatasource', function() { beforeEach(function() { createDatasource({ url: 'http://es.com', index: 'metricbeat' }); - ctx.backendSrv.datasourceRequest = function(options) { - return ctx.$q.when({ + ctx.backendSrv.datasourceRequest = jest.fn((options) => { + return Promise.resolve({ data: { metricbeat: { mappings: { @@ -190,7 +204,7 @@ describe('ElasticDatasource', function() { }, }, }); - }; + }) }); it('should return nested fields', function() { @@ -201,7 +215,7 @@ describe('ElasticDatasource', function() { }) .then(fieldObjects => { var fields = _.map(fieldObjects, 'text'); - expect(fields).to.eql([ + expect(fields).toEqual([ '@timestamp', 'beat.name.raw', 'beat.name', @@ -212,7 +226,7 @@ describe('ElasticDatasource', function() { 'system.process.name', ]); }); - 
ctx.$rootScope.$apply(); + // ctx.$rootScope.$apply(); }); it('should return fields related to query type', function() { @@ -224,7 +238,7 @@ describe('ElasticDatasource', function() { }) .then(fieldObjects => { var fields = _.map(fieldObjects, 'text'); - expect(fields).to.eql(['system.cpu.system', 'system.cpu.user', 'system.process.cpu.total']); + expect(fields).toEqual(['system.cpu.system', 'system.cpu.user', 'system.process.cpu.total']); }); ctx.ds @@ -235,10 +249,10 @@ describe('ElasticDatasource', function() { }) .then(fieldObjects => { var fields = _.map(fieldObjects, 'text'); - expect(fields).to.eql(['@timestamp']); + expect(fields).toEqual(['@timestamp']); }); - ctx.$rootScope.$apply(); + // ctx.$rootScope.$apply(); }); }); @@ -271,18 +285,18 @@ describe('ElasticDatasource', function() { ], }); - ctx.$rootScope.$apply(); + // ctx.$rootScope.$apply(); parts = requestOptions.data.split('\n'); header = angular.fromJson(parts[0]); }); it('should not set search type to count', function() { - expect(header.search_type).to.not.eql('count'); + expect(header.search_type).not.toEqual('count'); }); it('should set size to 0', function() { var body = angular.fromJson(parts[1]); - expect(body.size).to.be(0); + expect(body.size).toBe(0); }); }); @@ -324,7 +338,7 @@ describe('ElasticDatasource', function() { results = res; }); - ctx.$rootScope.$apply(); + // ctx.$rootScope.$apply(); parts = requestOptions.data.split('\n'); header = angular.fromJson(parts[0]); @@ -332,24 +346,24 @@ describe('ElasticDatasource', function() { }); it('should get results', function() { - expect(results.length).to.eql(2); + expect(results.length).toEqual(2); }); it('should use key or key_as_string', function() { - expect(results[0].text).to.eql('test'); - expect(results[1].text).to.eql('test2_as_string'); + expect(results[0].text).toEqual('test'); + expect(results[1].text).toEqual('test2_as_string'); }); it('should not set search type to count', function() { - 
expect(header.search_type).to.not.eql('count'); + expect(header.search_type).not.toEqual('count'); }); it('should set size to 0', function() { - expect(body.size).to.be(0); + expect(body.size).toBe(0); }); it('should not set terms aggregation size to 0', function() { - expect(body['aggs']['1']['terms'].size).to.not.be(0); + expect(body['aggs']['1']['terms'].size).not.toBe(0); }); }); }); From 12e76ad1972fe04fb97e870bab50524a3dcf014b Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 3 Jul 2018 10:47:50 +0200 Subject: [PATCH 072/263] Remove logs and comments --- .../elasticsearch/specs/datasource.jest.ts | 80 +++++++------------ 1 file changed, 30 insertions(+), 50 deletions(-) diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts index 9a3d57f1d4f2..def60ecdcb3d 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts @@ -1,15 +1,14 @@ import _ from 'lodash'; -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; import moment from 'moment'; import angular from 'angular'; import { ElasticDatasource } from '../datasource'; import { TimeSrv } from 'app/features/dashboard/time_srv'; +import { TemplateSrv } from 'app/features/templating/template_srv'; import $q from 'q'; describe('ElasticDatasource', function() { - //var ctx = new helpers.ServiceTestContext(); let backendSrv = { - datasourceRequest: jest.fn() + datasourceRequest: jest.fn(), }; let $rootScope = { @@ -17,30 +16,21 @@ describe('ElasticDatasource', function() { appEvent: jest.fn(), }; - let timeSrv = new TimeSrv($rootScope,jest.fn(),{},{},{}); + let templateSrv = new TemplateSrv(); + + templateSrv.variables = []; + + let timeSrv = new TimeSrv($rootScope, jest.fn(), {}, {}, {}); let ctx = { $rootScope, backendSrv, - $q + $q, }; - // 
beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach(ctx.providePhase(['templateSrv', 'backendSrv', 'timeSrv'])); - - // beforeEach( - // angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) { - // ctx.$q = $q; - // ctx.$httpBackend = $httpBackend; - // ctx.$rootScope = $rootScope; - // ctx.$injector = $injector; - // $httpBackend.when('GET', /\.html$/).respond(''); - // }) - // ); function createDatasource(instanceSettings) { instanceSettings.jsonData = instanceSettings.jsonData || {}; - ctx.ds = new ElasticDatasource(instanceSettings, {}, backendSrv, { replace: jest.fn() }, timeSrv); + ctx.ds = new ElasticDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv); } describe('When testing datasource with index pattern', function() { @@ -54,35 +44,34 @@ describe('ElasticDatasource', function() { it('should translate index pattern to current day', function() { var requestOptions; - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { requestOptions = options; return Promise.resolve({ data: {} }); }); ctx.ds.testDatasource(); - // ctx.$rootScope.$apply(); var today = moment.utc().format('YYYY.MM.DD'); - expect(requestOptions.url).to.be('http://es.com/asd-' + today + '/_mapping'); + expect(requestOptions.url).toBe('http://es.com/asd-' + today + '/_mapping'); }); }); describe('When issuing metric query with interval pattern', function() { var requestOptions, parts, header; - beforeEach(function() { + beforeEach(async () => { createDatasource({ url: 'http://es.com', index: '[asd-]YYYY.MM.DD', jsonData: { interval: 'Daily', esVersion: '2' }, }); - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { requestOptions = options; return Promise.resolve({ data: { responses: [] } }); }); - ctx.ds.query({ + await ctx.ds.query({ range: { from: moment.utc([2015, 
4, 30, 10]), to: moment.utc([2015, 5, 1, 10]), @@ -96,8 +85,6 @@ describe('ElasticDatasource', function() { ], }); - // ctx.$rootScope.$apply(); - parts = requestOptions.data.split('\n'); header = angular.fromJson(parts[0]); }); @@ -122,7 +109,7 @@ describe('ElasticDatasource', function() { jsonData: { esVersion: '2' }, }); - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { requestOptions = options; return Promise.resolve({ data: { responses: [] } }); }); @@ -141,7 +128,6 @@ describe('ElasticDatasource', function() { ], }); - // ctx.$rootScope.$apply(); parts = requestOptions.data.split('\n'); header = angular.fromJson(parts[0]); }); @@ -157,10 +143,10 @@ describe('ElasticDatasource', function() { }); describe('When getting fields', function() { - beforeEach(function() { + beforeEach(() => { createDatasource({ url: 'http://es.com', index: 'metricbeat' }); - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { return Promise.resolve({ data: { metricbeat: { @@ -204,7 +190,7 @@ describe('ElasticDatasource', function() { }, }, }); - }) + }); }); it('should return nested fields', function() { @@ -226,7 +212,6 @@ describe('ElasticDatasource', function() { 'system.process.name', ]); }); - // ctx.$rootScope.$apply(); }); it('should return fields related to query type', function() { @@ -251,8 +236,6 @@ describe('ElasticDatasource', function() { var fields = _.map(fieldObjects, 'text'); expect(fields).toEqual(['@timestamp']); }); - - // ctx.$rootScope.$apply(); }); }); @@ -266,10 +249,10 @@ describe('ElasticDatasource', function() { jsonData: { esVersion: '5' }, }); - ctx.backendSrv.datasourceRequest = function(options) { + ctx.backendSrv.datasourceRequest = jest.fn(options => { requestOptions = options; - return ctx.$q.when({ data: { responses: [] } }); - }; + return Promise.resolve({ data: { responses: [] } }); + }); ctx.ds.query({ range: { @@ 
-285,7 +268,6 @@ describe('ElasticDatasource', function() { ], }); - // ctx.$rootScope.$apply(); parts = requestOptions.data.split('\n'); header = angular.fromJson(parts[0]); }); @@ -303,16 +285,16 @@ describe('ElasticDatasource', function() { describe('When issuing metricFind query on es5.x', function() { var requestOptions, parts, header, body, results; - beforeEach(function() { + beforeEach(() => { createDatasource({ url: 'http://es.com', index: 'test', jsonData: { esVersion: '5' }, }); - ctx.backendSrv.datasourceRequest = function(options) { + ctx.backendSrv.datasourceRequest = jest.fn(options => { requestOptions = options; - return ctx.$q.when({ + return Promise.resolve({ data: { responses: [ { @@ -332,37 +314,35 @@ describe('ElasticDatasource', function() { ], }, }); - }; + }); ctx.ds.metricFindQuery('{"find": "terms", "field": "test"}').then(res => { results = res; }); - // ctx.$rootScope.$apply(); - parts = requestOptions.data.split('\n'); header = angular.fromJson(parts[0]); body = angular.fromJson(parts[1]); }); - it('should get results', function() { + it('should get results', () => { expect(results.length).toEqual(2); }); - it('should use key or key_as_string', function() { + it('should use key or key_as_string', () => { expect(results[0].text).toEqual('test'); expect(results[1].text).toEqual('test2_as_string'); }); - it('should not set search type to count', function() { + it('should not set search type to count', () => { expect(header.search_type).not.toEqual('count'); }); - it('should set size to 0', function() { + it('should set size to 0', () => { expect(body.size).toBe(0); }); - it('should not set terms aggregation size to 0', function() { + it('should not set terms aggregation size to 0', () => { expect(body['aggs']['1']['terms'].size).not.toBe(0); }); }); From 69c85e0d7efc4bb2096b9855c75274240087947a Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 3 Jul 2018 10:55:48 +0200 Subject: [PATCH 073/263] Remove async --- 
.../plugins/datasource/elasticsearch/specs/datasource.jest.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts index def60ecdcb3d..32b5a4dbbd5b 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts @@ -59,7 +59,7 @@ describe('ElasticDatasource', function() { describe('When issuing metric query with interval pattern', function() { var requestOptions, parts, header; - beforeEach(async () => { + beforeEach(() => { createDatasource({ url: 'http://es.com', index: '[asd-]YYYY.MM.DD', @@ -71,7 +71,7 @@ describe('ElasticDatasource', function() { return Promise.resolve({ data: { responses: [] } }); }); - await ctx.ds.query({ + ctx.ds.query({ range: { from: moment.utc([2015, 4, 30, 10]), to: moment.utc([2015, 5, 1, 10]), From 80b2f5c7567cdef4c6cb288001ceb44f164d48be Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 3 Jul 2018 11:55:23 +0200 Subject: [PATCH 074/263] Add Jest stubs --- .../elasticsearch/specs/datasource.jest.ts | 10 ++-- public/test/specs/helpers.ts | 51 ++++++++++++++++--- 2 files changed, 49 insertions(+), 12 deletions(-) diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts index 32b5a4dbbd5b..bfd55544d7bc 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts @@ -2,8 +2,8 @@ import _ from 'lodash'; import moment from 'moment'; import angular from 'angular'; import { ElasticDatasource } from '../datasource'; -import { TimeSrv } from 'app/features/dashboard/time_srv'; -import { TemplateSrv } from 'app/features/templating/template_srv'; +import { jestTimeSrvStub } from 'test/specs/helpers'; 
+import { jestTemplateSrvStub } from 'test/specs/helpers'; import $q from 'q'; describe('ElasticDatasource', function() { @@ -16,11 +16,9 @@ describe('ElasticDatasource', function() { appEvent: jest.fn(), }; - let templateSrv = new TemplateSrv(); + let templateSrv = new jestTemplateSrvStub(); - templateSrv.variables = []; - - let timeSrv = new TimeSrv($rootScope, jest.fn(), {}, {}, {}); + let timeSrv = new jestTimeSrvStub(); let ctx = { $rootScope, diff --git a/public/test/specs/helpers.ts b/public/test/specs/helpers.ts index dd8bd39846ef..d98e79ff06b6 100644 --- a/public/test/specs/helpers.ts +++ b/public/test/specs/helpers.ts @@ -195,13 +195,52 @@ export function TemplateSrvStub() { }; } +export function jestTemplateSrvStub() { + this.variables = []; + this.templateSettings = { interpolate: /\[\[([\s\S]+?)\]\]/g }; + this.data = {}; + this.replace = jest.fn(text => _.template(text, this.templateSettings)(this.data)); + this.init = jest.fn(); + this.getAdhocFilters = jest.fn(() => []); + this.fillVariableValuesForUrl = jest.fn(); + this.updateTemplateData = jest.fn(); + this.variableExists = jest.fn(() => false); + this.variableInitialized = jest.fn(); + this.highlightVariablesAsHtml = jest.fn(str => str); + this.setGrafanaVariable = jest.fn((name, value) => { + this.data[name] = value; + }); +} + +export function jestTimeSrvStub() { + this.init = jest.fn(); + this.time = { from: 'now-1h', to: 'now' }; + this.timeRange = jest.fn(parse => { + if (parse === false) { + return this.time; + } + return { + from: dateMath.parse(this.time.from, false), + to: dateMath.parse(this.time.to, true), + }; + }); + + this.replace = jest.fn(target => target); + + this.setTime = jest.fn(time => { + this.time = time; + }); +} + var allDeps = { - ContextSrvStub: ContextSrvStub, - TemplateSrvStub: TemplateSrvStub, - TimeSrvStub: TimeSrvStub, - ControllerTestContext: ControllerTestContext, - ServiceTestContext: ServiceTestContext, - DashboardViewStateStub: DashboardViewStateStub, + 
ContextSrvStub, + TemplateSrvStub, + TimeSrvStub, + ControllerTestContext, + ServiceTestContext, + DashboardViewStateStub, + jestTimeSrvStub, + jestTemplateSrvStub, }; // for legacy From 8f1bcd91178375c007688e022677611cd5d4c9f7 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 3 Jul 2018 13:33:30 +0200 Subject: [PATCH 075/263] Remove q and stub --- .../datasource/elasticsearch/specs/datasource.jest.ts | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts index bfd55544d7bc..b6d19d003da9 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts @@ -2,9 +2,7 @@ import _ from 'lodash'; import moment from 'moment'; import angular from 'angular'; import { ElasticDatasource } from '../datasource'; -import { jestTimeSrvStub } from 'test/specs/helpers'; -import { jestTemplateSrvStub } from 'test/specs/helpers'; -import $q from 'q'; +import { jestTimeSrvStub, TemplateSrvStub } from 'test/specs/helpers'; describe('ElasticDatasource', function() { let backendSrv = { @@ -16,14 +14,13 @@ describe('ElasticDatasource', function() { appEvent: jest.fn(), }; - let templateSrv = new jestTemplateSrvStub(); + let templateSrv = new TemplateSrvStub(); let timeSrv = new jestTimeSrvStub(); let ctx = { $rootScope, backendSrv, - $q, }; function createDatasource(instanceSettings) { From 12d158f391d0256802ff5be4bb0fb1f9335f4b66 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 4 Jul 2018 10:43:36 +0200 Subject: [PATCH 076/263] Add mocks in test file --- .../elasticsearch/specs/datasource.jest.ts | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts index 
b6d19d003da9..e9f7a61a5722 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts @@ -2,7 +2,8 @@ import _ from 'lodash'; import moment from 'moment'; import angular from 'angular'; import { ElasticDatasource } from '../datasource'; -import { jestTimeSrvStub, TemplateSrvStub } from 'test/specs/helpers'; + +import * as dateMath from 'app/core/utils/datemath'; describe('ElasticDatasource', function() { let backendSrv = { @@ -14,9 +15,23 @@ describe('ElasticDatasource', function() { appEvent: jest.fn(), }; - let templateSrv = new TemplateSrvStub(); + let templateSrv = { + replace: jest.fn(text => text), + getAdhocFilters: jest.fn(() => []), + }; - let timeSrv = new jestTimeSrvStub(); + let timeSrv = { + time: { from: 'now-1h', to: 'now' }, + timeRange: jest.fn(parse => { + return { + from: dateMath.parse(this.time.from, false), + to: dateMath.parse(this.time.to, true), + }; + }), + setTime: jest.fn(time => { + this.time = time; + }), + }; let ctx = { $rootScope, From d3c213973c7d6eeb0eb2affbf2178bad2713c641 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 4 Jul 2018 11:23:12 +0200 Subject: [PATCH 077/263] Basic cleanup --- .../elasticsearch/specs/datasource.jest.ts | 2 +- public/test/specs/helpers.ts | 39 ------------------- 2 files changed, 1 insertion(+), 40 deletions(-) diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts index e9f7a61a5722..36e7a63a0059 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/datasource.jest.ts @@ -22,7 +22,7 @@ describe('ElasticDatasource', function() { let timeSrv = { time: { from: 'now-1h', to: 'now' }, - timeRange: jest.fn(parse => { + timeRange: jest.fn(() => { return { from: dateMath.parse(this.time.from, false), to: 
dateMath.parse(this.time.to, true), diff --git a/public/test/specs/helpers.ts b/public/test/specs/helpers.ts index d98e79ff06b6..677419f3f756 100644 --- a/public/test/specs/helpers.ts +++ b/public/test/specs/helpers.ts @@ -195,43 +195,6 @@ export function TemplateSrvStub() { }; } -export function jestTemplateSrvStub() { - this.variables = []; - this.templateSettings = { interpolate: /\[\[([\s\S]+?)\]\]/g }; - this.data = {}; - this.replace = jest.fn(text => _.template(text, this.templateSettings)(this.data)); - this.init = jest.fn(); - this.getAdhocFilters = jest.fn(() => []); - this.fillVariableValuesForUrl = jest.fn(); - this.updateTemplateData = jest.fn(); - this.variableExists = jest.fn(() => false); - this.variableInitialized = jest.fn(); - this.highlightVariablesAsHtml = jest.fn(str => str); - this.setGrafanaVariable = jest.fn((name, value) => { - this.data[name] = value; - }); -} - -export function jestTimeSrvStub() { - this.init = jest.fn(); - this.time = { from: 'now-1h', to: 'now' }; - this.timeRange = jest.fn(parse => { - if (parse === false) { - return this.time; - } - return { - from: dateMath.parse(this.time.from, false), - to: dateMath.parse(this.time.to, true), - }; - }); - - this.replace = jest.fn(target => target); - - this.setTime = jest.fn(time => { - this.time = time; - }); -} - var allDeps = { ContextSrvStub, TemplateSrvStub, @@ -239,8 +202,6 @@ var allDeps = { ControllerTestContext, ServiceTestContext, DashboardViewStateStub, - jestTimeSrvStub, - jestTemplateSrvStub, }; // for legacy From 3bbe39c5ad92102412229717c34214c1af401354 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 3 Jul 2018 12:45:04 +0200 Subject: [PATCH 078/263] Karma to Jest: postgres datasource --- ...datasource_specs.ts => datasource.jest.ts} | 106 ++++++++---------- 1 file changed, 46 insertions(+), 60 deletions(-) rename public/app/plugins/datasource/postgres/specs/{datasource_specs.ts => datasource.jest.ts} (66%) diff --git 
a/public/app/plugins/datasource/postgres/specs/datasource_specs.ts b/public/app/plugins/datasource/postgres/specs/datasource.jest.ts similarity index 66% rename from public/app/plugins/datasource/postgres/specs/datasource_specs.ts rename to public/app/plugins/datasource/postgres/specs/datasource.jest.ts index f17ad019f267..d00e036a5148 100644 --- a/public/app/plugins/datasource/postgres/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/postgres/specs/datasource.jest.ts @@ -1,28 +1,17 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; import moment from 'moment'; -import helpers from 'test/specs/helpers'; import { PostgresDatasource } from '../datasource'; import { CustomVariable } from 'app/features/templating/custom_variable'; +import { TemplateSrvStub } from 'test/specs/helpers'; describe('PostgreSQLDatasource', function() { - var ctx = new helpers.ServiceTestContext(); - var instanceSettings = { name: 'postgresql' }; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach(ctx.providePhase(['backendSrv'])); - - beforeEach( - angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) { - ctx.$q = $q; - ctx.$httpBackend = $httpBackend; - ctx.$rootScope = $rootScope; - ctx.ds = $injector.instantiate(PostgresDatasource, { - instanceSettings: instanceSettings, - }); - $httpBackend.when('GET', /\.html$/).respond(''); - }) - ); + let instanceSettings = { name: 'postgresql' }; + + let backendSrv = {}; + let templateSrv = new TemplateSrvStub(); + let ctx = { + ds: new PostgresDatasource(instanceSettings, backendSrv, {}, templateSrv), + backendSrv + }; describe('When performing annotationQuery', function() { let results; @@ -59,26 +48,25 @@ describe('PostgreSQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(options) { - return ctx.$q.when({ data: response, status: 200 }); - }; + 
ctx.backendSrv.datasourceRequest = jest.fn((options) => { + return Promise.resolve({ data: response, status: 200 }); + }); ctx.ds.annotationQuery(options).then(function(data) { results = data; }); - ctx.$rootScope.$apply(); }); it('should return annotation list', function() { - expect(results.length).to.be(3); + expect(results.length).toBe(3); - expect(results[0].text).to.be('some text'); - expect(results[0].tags[0]).to.be('TagA'); - expect(results[0].tags[1]).to.be('TagB'); + expect(results[0].text).toBe('some text'); + expect(results[0].tags[0]).toBe('TagA'); + expect(results[0].tags[1]).toBe('TagB'); - expect(results[1].tags[0]).to.be('TagB'); - expect(results[1].tags[1]).to.be('TagC'); + expect(results[1].tags[0]).toBe('TagB'); + expect(results[1].tags[1]).toBe('TagC'); - expect(results[2].tags.length).to.be(0); + expect(results[2].tags.length).toBe(0); }); }); @@ -103,19 +91,18 @@ describe('PostgreSQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(options) { - return ctx.$q.when({ data: response, status: 200 }); - }; + ctx.backendSrv.datasourceRequest = jest.fn((options) => { + return Promise.resolve({ data: response, status: 200 }); + }); ctx.ds.metricFindQuery(query).then(function(data) { results = data; }); - ctx.$rootScope.$apply(); }); it('should return list of all column values', function() { - expect(results.length).to.be(6); - expect(results[0].text).to.be('aTitle'); - expect(results[5].text).to.be('some text3'); + expect(results.length).toBe(6); + expect(results[0].text).toBe('aTitle'); + expect(results[5].text).toBe('some text3'); }); }); @@ -140,21 +127,20 @@ describe('PostgreSQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(options) { - return ctx.$q.when({ data: response, status: 200 }); - }; + ctx.backendSrv.datasourceRequest = jest.fn((options) => { + return Promise.resolve({ data: response, status: 200 }); + }); 
ctx.ds.metricFindQuery(query).then(function(data) { results = data; }); - ctx.$rootScope.$apply(); }); it('should return list of as text, value', function() { - expect(results.length).to.be(3); - expect(results[0].text).to.be('aTitle'); - expect(results[0].value).to.be('value1'); - expect(results[2].text).to.be('aTitle3'); - expect(results[2].value).to.be('value3'); + expect(results.length).toBe(3); + expect(results[0].text).toBe('aTitle'); + expect(results[0].value).toBe('value1'); + expect(results[2].text).toBe('aTitle3'); + expect(results[2].value).toBe('value3'); }); }); @@ -178,20 +164,20 @@ describe('PostgreSQLDatasource', function() { }, }; - beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(options) { - return ctx.$q.when({ data: response, status: 200 }); - }; + beforeEach(() => { + ctx.backendSrv.datasourceRequest = jest.fn((options) => { + return Promise.resolve({ data: response, status: 200 }); + }); ctx.ds.metricFindQuery(query).then(function(data) { results = data; }); - ctx.$rootScope.$apply(); + //ctx.$rootScope.$apply(); }); it('should return list of unique keys', function() { - expect(results.length).to.be(1); - expect(results[0].text).to.be('aTitle'); - expect(results[0].value).to.be('same'); + expect(results.length).toBe(1); + expect(results[0].text).toBe('aTitle'); + expect(results[0].value).toBe('same'); }); }); @@ -202,33 +188,33 @@ describe('PostgreSQLDatasource', function() { describe('and value is a string', () => { it('should return an unquoted value', () => { - expect(ctx.ds.interpolateVariable('abc', ctx.variable)).to.eql('abc'); + expect(ctx.ds.interpolateVariable('abc', ctx.variable)).toEqual('abc'); }); }); describe('and value is a number', () => { it('should return an unquoted value', () => { - expect(ctx.ds.interpolateVariable(1000, ctx.variable)).to.eql(1000); + expect(ctx.ds.interpolateVariable(1000, ctx.variable)).toEqual(1000); }); }); describe('and value is an array of strings', () => { it('should return 
comma separated quoted values', () => { - expect(ctx.ds.interpolateVariable(['a', 'b', 'c'], ctx.variable)).to.eql("'a','b','c'"); + expect(ctx.ds.interpolateVariable(['a', 'b', 'c'], ctx.variable)).toEqual("'a','b','c'"); }); }); describe('and variable allows multi-value and is a string', () => { it('should return a quoted value', () => { ctx.variable.multi = true; - expect(ctx.ds.interpolateVariable('abc', ctx.variable)).to.eql("'abc'"); + expect(ctx.ds.interpolateVariable('abc', ctx.variable)).toEqual("'abc'"); }); }); describe('and variable allows all and is a string', () => { it('should return a quoted value', () => { ctx.variable.includeAll = true; - expect(ctx.ds.interpolateVariable('abc', ctx.variable)).to.eql("'abc'"); + expect(ctx.ds.interpolateVariable('abc', ctx.variable)).toEqual("'abc'"); }); }); }); From d4bd52139aaba4091d90c237d199f185b0992164 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 3 Jul 2018 12:56:36 +0200 Subject: [PATCH 079/263] Karma to Jest: MySQL datasource --- ...datasource_specs.ts => datasource.jest.ts} | 104 ++++++++---------- 1 file changed, 45 insertions(+), 59 deletions(-) rename public/app/plugins/datasource/mysql/specs/{datasource_specs.ts => datasource.jest.ts} (66%) diff --git a/public/app/plugins/datasource/mysql/specs/datasource_specs.ts b/public/app/plugins/datasource/mysql/specs/datasource.jest.ts similarity index 66% rename from public/app/plugins/datasource/mysql/specs/datasource_specs.ts rename to public/app/plugins/datasource/mysql/specs/datasource.jest.ts index 0373797f2113..5a5881b3165b 100644 --- a/public/app/plugins/datasource/mysql/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/mysql/specs/datasource.jest.ts @@ -1,28 +1,18 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; +//import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; import moment from 'moment'; -import helpers from 'test/specs/helpers'; +import { TemplateSrvStub 
} from 'test/specs/helpers'; import { MysqlDatasource } from '../datasource'; import { CustomVariable } from 'app/features/templating/custom_variable'; describe('MySQLDatasource', function() { - var ctx = new helpers.ServiceTestContext(); - var instanceSettings = { name: 'mysql' }; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach(ctx.providePhase(['backendSrv'])); - - beforeEach( - angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) { - ctx.$q = $q; - ctx.$httpBackend = $httpBackend; - ctx.$rootScope = $rootScope; - ctx.ds = $injector.instantiate(MysqlDatasource, { - instanceSettings: instanceSettings, - }); - $httpBackend.when('GET', /\.html$/).respond(''); - }) - ); + + let instanceSettings = { name: 'mysql' }; + let backendSrv = {}; + let templateSrv = new TemplateSrvStub(); + let ctx = { + ds: new MysqlDatasource(instanceSettings, backendSrv, {}, templateSrv), + backendSrv + }; describe('When performing annotationQuery', function() { let results; @@ -59,26 +49,25 @@ describe('MySQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(options) { - return ctx.$q.when({ data: response, status: 200 }); - }; + ctx.backendSrv.datasourceRequest = jest.fn((options) => { + return Promise.resolve({ data: response, status: 200 }); + }); ctx.ds.annotationQuery(options).then(function(data) { results = data; }); - ctx.$rootScope.$apply(); }); it('should return annotation list', function() { - expect(results.length).to.be(3); + expect(results.length).toBe(3); - expect(results[0].text).to.be('some text'); - expect(results[0].tags[0]).to.be('TagA'); - expect(results[0].tags[1]).to.be('TagB'); + expect(results[0].text).toBe('some text'); + expect(results[0].tags[0]).toBe('TagA'); + expect(results[0].tags[1]).toBe('TagB'); - expect(results[1].tags[0]).to.be('TagB'); - expect(results[1].tags[1]).to.be('TagC'); + 
expect(results[1].tags[0]).toBe('TagB'); + expect(results[1].tags[1]).toBe('TagC'); - expect(results[2].tags.length).to.be(0); + expect(results[2].tags.length).toBe(0); }); }); @@ -103,19 +92,18 @@ describe('MySQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(options) { - return ctx.$q.when({ data: response, status: 200 }); - }; + ctx.backendSrv.datasourceRequest = jest.fn((options) => { + return Promise.resolve({ data: response, status: 200 }); + }); ctx.ds.metricFindQuery(query).then(function(data) { results = data; }); - ctx.$rootScope.$apply(); }); it('should return list of all column values', function() { - expect(results.length).to.be(6); - expect(results[0].text).to.be('aTitle'); - expect(results[5].text).to.be('some text3'); + expect(results.length).toBe(6); + expect(results[0].text).toBe('aTitle'); + expect(results[5].text).toBe('some text3'); }); }); @@ -140,21 +128,20 @@ describe('MySQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(options) { - return ctx.$q.when({ data: response, status: 200 }); - }; + ctx.backendSrv.datasourceRequest = jest.fn((options) => { + return Promise.resolve({ data: response, status: 200 }); + }); ctx.ds.metricFindQuery(query).then(function(data) { results = data; }); - ctx.$rootScope.$apply(); }); it('should return list of as text, value', function() { - expect(results.length).to.be(3); - expect(results[0].text).to.be('aTitle'); - expect(results[0].value).to.be('value1'); - expect(results[2].text).to.be('aTitle3'); - expect(results[2].value).to.be('value3'); + expect(results.length).toBe(3); + expect(results[0].text).toBe('aTitle'); + expect(results[0].value).toBe('value1'); + expect(results[2].text).toBe('aTitle3'); + expect(results[2].value).toBe('value3'); }); }); @@ -179,19 +166,18 @@ describe('MySQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(options) { - return 
ctx.$q.when({ data: response, status: 200 }); - }; + ctx.backendSrv.datasourceRequest = jest.fn((options) => { + return Promise.resolve({ data: response, status: 200 }); + }); ctx.ds.metricFindQuery(query).then(function(data) { results = data; }); - ctx.$rootScope.$apply(); }); it('should return list of unique keys', function() { - expect(results.length).to.be(1); - expect(results[0].text).to.be('aTitle'); - expect(results[0].value).to.be('same'); + expect(results.length).toBe(1); + expect(results[0].text).toBe('aTitle'); + expect(results[0].value).toBe('same'); }); }); @@ -202,33 +188,33 @@ describe('MySQLDatasource', function() { describe('and value is a string', () => { it('should return an unquoted value', () => { - expect(ctx.ds.interpolateVariable('abc', ctx.variable)).to.eql('abc'); + expect(ctx.ds.interpolateVariable('abc', ctx.variable)).toEqual('abc'); }); }); describe('and value is a number', () => { it('should return an unquoted value', () => { - expect(ctx.ds.interpolateVariable(1000, ctx.variable)).to.eql(1000); + expect(ctx.ds.interpolateVariable(1000, ctx.variable)).toEqual(1000); }); }); describe('and value is an array of strings', () => { it('should return comma separated quoted values', () => { - expect(ctx.ds.interpolateVariable(['a', 'b', 'c'], ctx.variable)).to.eql("'a','b','c'"); + expect(ctx.ds.interpolateVariable(['a', 'b', 'c'], ctx.variable)).toEqual("'a','b','c'"); }); }); describe('and variable allows multi-value and value is a string', () => { it('should return a quoted value', () => { ctx.variable.multi = true; - expect(ctx.ds.interpolateVariable('abc', ctx.variable)).to.eql("'abc'"); + expect(ctx.ds.interpolateVariable('abc', ctx.variable)).toEqual("'abc'"); }); }); describe('and variable allows all and value is a string', () => { it('should return a quoted value', () => { ctx.variable.includeAll = true; - expect(ctx.ds.interpolateVariable('abc', ctx.variable)).to.eql("'abc'"); + expect(ctx.ds.interpolateVariable('abc', 
ctx.variable)).toEqual("'abc'"); }); }); }); From 42ba0dc73f3037aaa51f63ced03fc6eb0fa02452 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 3 Jul 2018 13:20:36 +0200 Subject: [PATCH 080/263] Karma to Jest: Cloudwatch datasource --- ...datasource_specs.ts => datasource.jest.ts} | 162 ++++++++++-------- public/test/specs/helpers.ts | 18 ++ 2 files changed, 104 insertions(+), 76 deletions(-) rename public/app/plugins/datasource/cloudwatch/specs/{datasource_specs.ts => datasource.jest.ts} (74%) diff --git a/public/app/plugins/datasource/cloudwatch/specs/datasource_specs.ts b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts similarity index 74% rename from public/app/plugins/datasource/cloudwatch/specs/datasource_specs.ts rename to public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts index b2c4950d3ce8..705c35f2ebc3 100644 --- a/public/app/plugins/datasource/cloudwatch/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts @@ -1,32 +1,42 @@ import '../datasource'; -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; +//import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; +import { TemplateSrvStub, jestTimeSrvStub } from 'test/specs/helpers'; import CloudWatchDatasource from '../datasource'; import 'app/features/dashboard/time_srv'; describe('CloudWatchDatasource', function() { - var ctx = new helpers.ServiceTestContext(); - var instanceSettings = { + + let instanceSettings = { jsonData: { defaultRegion: 'us-east-1', access: 'proxy' }, }; + let templateSrv = new TemplateSrvStub(); + let timeSrv = new jestTimeSrvStub(); + let backendSrv = {}; + let ctx = { + backendSrv, + templateSrv + }; - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach(angularMocks.module('grafana.controllers')); - 
beforeEach(ctx.providePhase(['templateSrv', 'backendSrv'])); - beforeEach(ctx.createService('timeSrv')); - - beforeEach( - angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) { - ctx.$q = $q; - ctx.$httpBackend = $httpBackend; - ctx.$rootScope = $rootScope; - ctx.ds = $injector.instantiate(CloudWatchDatasource, { - instanceSettings: instanceSettings, - }); - $httpBackend.when('GET', /\.html$/).respond(''); - }) - ); + beforeEach(() => { + ctx.ds = new CloudWatchDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv); + }); + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(ctx.providePhase(['templateSrv', 'backendSrv'])); + // beforeEach(ctx.createService('timeSrv')); + + // beforeEach( + // angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) { + // ctx.$q = $q; + // ctx.$httpBackend = $httpBackend; + // ctx.$rootScope = $rootScope; + // ctx.ds = $injector.instantiate(CloudWatchDatasource, { + // instanceSettings: instanceSettings, + // }); + // $httpBackend.when('GET', /\.html$/).respond(''); + // }) + // ); describe('When performing CloudWatch query', function() { var requestParams; @@ -67,24 +77,24 @@ describe('CloudWatchDatasource', function() { }, }; - beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(params) { + beforeEach(async () => { + ctx.backendSrv.datasourceRequest = await jest.fn((params) => { requestParams = params.data; - return ctx.$q.when({ data: response }); - }; + return Promise.resolve({ data: response }); + }); }); it('should generate the correct query', function(done) { ctx.ds.query(query).then(function() { var params = requestParams.queries[0]; - expect(params.namespace).to.be(query.targets[0].namespace); - expect(params.metricName).to.be(query.targets[0].metricName); - expect(params.dimensions['InstanceId']).to.be('i-12345678'); - 
expect(params.statistics).to.eql(query.targets[0].statistics); - expect(params.period).to.be(query.targets[0].period); + expect(params.namespace).toBe(query.targets[0].namespace); + expect(params.metricName).toBe(query.targets[0].metricName); + expect(params.dimensions['InstanceId']).toBe('i-12345678'); + expect(params.statistics).toEqual(query.targets[0].statistics); + expect(params.period).toBe(query.targets[0].period); done(); }); - ctx.$rootScope.$apply(); + //ctx.$rootScope.$apply(); }); it('should generate the correct query with interval variable', function(done) { @@ -111,19 +121,19 @@ describe('CloudWatchDatasource', function() { ctx.ds.query(query).then(function() { var params = requestParams.queries[0]; - expect(params.period).to.be('600'); + expect(params.period).toBe('600'); done(); }); - ctx.$rootScope.$apply(); + //ctx.$rootScope.$apply(); }); it('should return series list', function(done) { ctx.ds.query(query).then(function(result) { - expect(result.data[0].target).to.be(response.results.A.series[0].name); - expect(result.data[0].datapoints[0][0]).to.be(response.results.A.series[0].points[0][0]); + expect(result.data[0].target).toBe(response.results.A.series[0].name); + expect(result.data[0].datapoints[0][0]).toBe(response.results.A.series[0].points[0][0]); done(); }); - ctx.$rootScope.$apply(); + //ctx.$rootScope.$apply(); }); it('should generate the correct targets by expanding template variables', function() { @@ -173,7 +183,7 @@ describe('CloudWatchDatasource', function() { ]; var result = ctx.ds.expandTemplateVariable(targets, {}, templateSrv); - expect(result[0].dimensions.InstanceId).to.be('i-34567890'); + expect(result[0].dimensions.InstanceId).toBe('i-34567890'); }); it('should generate the correct targets by expanding template variables from url', function() { @@ -220,7 +230,7 @@ describe('CloudWatchDatasource', function() { ]; var result = ctx.ds.expandTemplateVariable(targets, {}, templateSrv); - 
expect(result[0].dimensions.InstanceId).to.be('i-45678901'); + expect(result[0].dimensions.InstanceId).toBe('i-45678901'); }); }); @@ -228,21 +238,21 @@ describe('CloudWatchDatasource', function() { it('should return the datasource region if empty or "default"', function() { var defaultRegion = instanceSettings.jsonData.defaultRegion; - expect(ctx.ds.getActualRegion()).to.be(defaultRegion); - expect(ctx.ds.getActualRegion('')).to.be(defaultRegion); - expect(ctx.ds.getActualRegion('default')).to.be(defaultRegion); + expect(ctx.ds.getActualRegion()).toBe(defaultRegion); + expect(ctx.ds.getActualRegion('')).toBe(defaultRegion); + expect(ctx.ds.getActualRegion('default')).toBe(defaultRegion); }); it('should return the specified region if specified', function() { - expect(ctx.ds.getActualRegion('some-fake-region-1')).to.be('some-fake-region-1'); + expect(ctx.ds.getActualRegion('some-fake-region-1')).toBe('some-fake-region-1'); }); var requestParams; beforeEach(function() { - ctx.ds.performTimeSeriesQuery = function(request) { + ctx.ds.performTimeSeriesQuery = jest.fn((request) => { requestParams = request; - return ctx.$q.when({ data: {} }); - }; + return Promise.resolve({ data: {} }); + }); }); it('should query for the datasource region if empty or "default"', function(done) { @@ -264,10 +274,10 @@ describe('CloudWatchDatasource', function() { }; ctx.ds.query(query).then(function(result) { - expect(requestParams.queries[0].region).to.be(instanceSettings.jsonData.defaultRegion); + expect(requestParams.queries[0].region).toBe(instanceSettings.jsonData.defaultRegion); done(); }); - ctx.$rootScope.$apply(); + //ctx.$rootScope.$apply(); }); }); @@ -311,18 +321,18 @@ describe('CloudWatchDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(params) { - return ctx.$q.when({ data: response }); - }; + ctx.backendSrv.datasourceRequest = jest.fn((params) => { + return Promise.resolve({ data: response }); + }); }); it('should return 
series list', function(done) { ctx.ds.query(query).then(function(result) { - expect(result.data[0].target).to.be(response.results.A.series[0].name); - expect(result.data[0].datapoints[0][0]).to.be(response.results.A.series[0].points[0][0]); + expect(result.data[0].target).toBe(response.results.A.series[0].name); + expect(result.data[0].datapoints[0][0]).toBe(response.results.A.series[0].points[0][0]); done(); }); - ctx.$rootScope.$apply(); + //ctx.$rootScope.$apply(); }); }); @@ -332,14 +342,14 @@ describe('CloudWatchDatasource', function() { scenario.setup = setupCallback => { beforeEach(() => { setupCallback(); - ctx.backendSrv.datasourceRequest = args => { + ctx.backendSrv.datasourceRequest = jest.fn((args) => { scenario.request = args.data; - return ctx.$q.when({ data: scenario.requestResponse }); - }; + return Promise.resolve({ data: scenario.requestResponse }); + }); ctx.ds.metricFindQuery(query).then(args => { scenario.result = args; }); - ctx.$rootScope.$apply(); + //ctx.$rootScope.$apply(); }); }; @@ -359,9 +369,9 @@ describe('CloudWatchDatasource', function() { }); it('should call __GetRegions and return result', () => { - expect(scenario.result[0].text).to.contain('us-east-1'); - expect(scenario.request.queries[0].type).to.be('metricFindQuery'); - expect(scenario.request.queries[0].subtype).to.be('regions'); + expect(scenario.result[0].text).toContain('us-east-1'); + expect(scenario.request.queries[0].type).toBe('metricFindQuery'); + expect(scenario.request.queries[0].subtype).toBe('regions'); }); }); @@ -377,9 +387,9 @@ describe('CloudWatchDatasource', function() { }); it('should call __GetNamespaces and return result', () => { - expect(scenario.result[0].text).to.contain('AWS/EC2'); - expect(scenario.request.queries[0].type).to.be('metricFindQuery'); - expect(scenario.request.queries[0].subtype).to.be('namespaces'); + expect(scenario.result[0].text).toContain('AWS/EC2'); + expect(scenario.request.queries[0].type).toBe('metricFindQuery'); + 
expect(scenario.request.queries[0].subtype).toBe('namespaces'); }); }); @@ -395,9 +405,9 @@ describe('CloudWatchDatasource', function() { }); it('should call __GetMetrics and return result', () => { - expect(scenario.result[0].text).to.be('CPUUtilization'); - expect(scenario.request.queries[0].type).to.be('metricFindQuery'); - expect(scenario.request.queries[0].subtype).to.be('metrics'); + expect(scenario.result[0].text).toBe('CPUUtilization'); + expect(scenario.request.queries[0].type).toBe('metricFindQuery'); + expect(scenario.request.queries[0].subtype).toBe('metrics'); }); }); @@ -413,9 +423,9 @@ describe('CloudWatchDatasource', function() { }); it('should call __GetDimensions and return result', () => { - expect(scenario.result[0].text).to.be('InstanceId'); - expect(scenario.request.queries[0].type).to.be('metricFindQuery'); - expect(scenario.request.queries[0].subtype).to.be('dimension_keys'); + expect(scenario.result[0].text).toBe('InstanceId'); + expect(scenario.request.queries[0].type).toBe('metricFindQuery'); + expect(scenario.request.queries[0].subtype).toBe('dimension_keys'); }); }); @@ -431,9 +441,9 @@ describe('CloudWatchDatasource', function() { }); it('should call __ListMetrics and return result', () => { - expect(scenario.result[0].text).to.contain('i-12345678'); - expect(scenario.request.queries[0].type).to.be('metricFindQuery'); - expect(scenario.request.queries[0].subtype).to.be('dimension_values'); + expect(scenario.result[0].text).toContain('i-12345678'); + expect(scenario.request.queries[0].type).toBe('metricFindQuery'); + expect(scenario.request.queries[0].subtype).toBe('dimension_values'); }); }); @@ -449,9 +459,9 @@ describe('CloudWatchDatasource', function() { }); it('should call __ListMetrics and return result', () => { - expect(scenario.result[0].text).to.contain('i-12345678'); - expect(scenario.request.queries[0].type).to.be('metricFindQuery'); - expect(scenario.request.queries[0].subtype).to.be('dimension_values'); + 
expect(scenario.result[0].text).toContain('i-12345678'); + expect(scenario.request.queries[0].type).toBe('metricFindQuery'); + expect(scenario.request.queries[0].subtype).toBe('dimension_values'); }); }); @@ -544,7 +554,7 @@ describe('CloudWatchDatasource', function() { let now = new Date(options.range.from.valueOf() + t[2] * 1000); let expected = t[3]; let actual = ctx.ds.getPeriod(target, options, now); - expect(actual).to.be(expected); + expect(actual).toBe(expected); } }); }); diff --git a/public/test/specs/helpers.ts b/public/test/specs/helpers.ts index 677419f3f756..811587b2c912 100644 --- a/public/test/specs/helpers.ts +++ b/public/test/specs/helpers.ts @@ -195,6 +195,24 @@ export function TemplateSrvStub() { }; } +export function jestTimeSrvStub() { + this.init = jest.fn(); + this.time = { from: 'now-1h', to: 'now' }; + this.timeRange = jest.fn(parse => { + if (parse === false) { + return this.time; + } + return { + from: dateMath.parse(this.time.from, false), + to: dateMath.parse(this.time.to, true), + }; + }); + this.replace = jest.fn(target => target); + this.setTime = jest.fn(time => { + this.time = time; + }); +} + var allDeps = { ContextSrvStub, TemplateSrvStub, From a37a0c7be3c05ba325384751153959b3dfe3381f Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 3 Jul 2018 13:21:59 +0200 Subject: [PATCH 081/263] Remove comments --- .../cloudwatch/specs/datasource.jest.ts | 34 +++---------------- 1 file changed, 5 insertions(+), 29 deletions(-) diff --git a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts index 705c35f2ebc3..4e479e6a83e8 100644 --- a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts @@ -5,7 +5,6 @@ import CloudWatchDatasource from '../datasource'; import 'app/features/dashboard/time_srv'; describe('CloudWatchDatasource', function() { - let instanceSettings = { 
jsonData: { defaultRegion: 'us-east-1', access: 'proxy' }, }; @@ -14,29 +13,12 @@ describe('CloudWatchDatasource', function() { let backendSrv = {}; let ctx = { backendSrv, - templateSrv + templateSrv, }; beforeEach(() => { ctx.ds = new CloudWatchDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv); }); - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(ctx.providePhase(['templateSrv', 'backendSrv'])); - // beforeEach(ctx.createService('timeSrv')); - - // beforeEach( - // angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) { - // ctx.$q = $q; - // ctx.$httpBackend = $httpBackend; - // ctx.$rootScope = $rootScope; - // ctx.ds = $injector.instantiate(CloudWatchDatasource, { - // instanceSettings: instanceSettings, - // }); - // $httpBackend.when('GET', /\.html$/).respond(''); - // }) - // ); describe('When performing CloudWatch query', function() { var requestParams; @@ -78,7 +60,7 @@ describe('CloudWatchDatasource', function() { }; beforeEach(async () => { - ctx.backendSrv.datasourceRequest = await jest.fn((params) => { + ctx.backendSrv.datasourceRequest = await jest.fn(params => { requestParams = params.data; return Promise.resolve({ data: response }); }); @@ -94,7 +76,6 @@ describe('CloudWatchDatasource', function() { expect(params.period).toBe(query.targets[0].period); done(); }); - //ctx.$rootScope.$apply(); }); it('should generate the correct query with interval variable', function(done) { @@ -124,7 +105,6 @@ describe('CloudWatchDatasource', function() { expect(params.period).toBe('600'); done(); }); - //ctx.$rootScope.$apply(); }); it('should return series list', function(done) { @@ -133,7 +113,6 @@ describe('CloudWatchDatasource', function() { expect(result.data[0].datapoints[0][0]).toBe(response.results.A.series[0].points[0][0]); done(); }); - //ctx.$rootScope.$apply(); }); it('should 
generate the correct targets by expanding template variables', function() { @@ -249,7 +228,7 @@ describe('CloudWatchDatasource', function() { var requestParams; beforeEach(function() { - ctx.ds.performTimeSeriesQuery = jest.fn((request) => { + ctx.ds.performTimeSeriesQuery = jest.fn(request => { requestParams = request; return Promise.resolve({ data: {} }); }); @@ -277,7 +256,6 @@ describe('CloudWatchDatasource', function() { expect(requestParams.queries[0].region).toBe(instanceSettings.jsonData.defaultRegion); done(); }); - //ctx.$rootScope.$apply(); }); }); @@ -321,7 +299,7 @@ describe('CloudWatchDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = jest.fn((params) => { + ctx.backendSrv.datasourceRequest = jest.fn(params => { return Promise.resolve({ data: response }); }); }); @@ -332,7 +310,6 @@ describe('CloudWatchDatasource', function() { expect(result.data[0].datapoints[0][0]).toBe(response.results.A.series[0].points[0][0]); done(); }); - //ctx.$rootScope.$apply(); }); }); @@ -342,14 +319,13 @@ describe('CloudWatchDatasource', function() { scenario.setup = setupCallback => { beforeEach(() => { setupCallback(); - ctx.backendSrv.datasourceRequest = jest.fn((args) => { + ctx.backendSrv.datasourceRequest = jest.fn(args => { scenario.request = args.data; return Promise.resolve({ data: scenario.requestResponse }); }); ctx.ds.metricFindQuery(query).then(args => { scenario.result = args; }); - //ctx.$rootScope.$apply(); }); }; From 962340bcd1adce74412cf57e04e920efa761b00f Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 3 Jul 2018 13:26:42 +0200 Subject: [PATCH 082/263] Create new instance in beforeEach --- .../datasource/mysql/specs/datasource.jest.ts | 16 +++++++++------- .../datasource/postgres/specs/datasource.jest.ts | 15 +++++++++------ 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/public/app/plugins/datasource/mysql/specs/datasource.jest.ts 
b/public/app/plugins/datasource/mysql/specs/datasource.jest.ts index 5a5881b3165b..5a97e4b441d0 100644 --- a/public/app/plugins/datasource/mysql/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/mysql/specs/datasource.jest.ts @@ -5,15 +5,17 @@ import { MysqlDatasource } from '../datasource'; import { CustomVariable } from 'app/features/templating/custom_variable'; describe('MySQLDatasource', function() { - let instanceSettings = { name: 'mysql' }; let backendSrv = {}; let templateSrv = new TemplateSrvStub(); let ctx = { - ds: new MysqlDatasource(instanceSettings, backendSrv, {}, templateSrv), - backendSrv + backendSrv, }; + beforeEach(() => { + ctx.ds = new MysqlDatasource(instanceSettings, backendSrv, {}, templateSrv); + }); + describe('When performing annotationQuery', function() { let results; @@ -49,7 +51,7 @@ describe('MySQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { return Promise.resolve({ data: response, status: 200 }); }); ctx.ds.annotationQuery(options).then(function(data) { @@ -92,7 +94,7 @@ describe('MySQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { return Promise.resolve({ data: response, status: 200 }); }); ctx.ds.metricFindQuery(query).then(function(data) { @@ -128,7 +130,7 @@ describe('MySQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { return Promise.resolve({ data: response, status: 200 }); }); ctx.ds.metricFindQuery(query).then(function(data) { @@ -166,7 +168,7 @@ describe('MySQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { return 
Promise.resolve({ data: response, status: 200 }); }); ctx.ds.metricFindQuery(query).then(function(data) { diff --git a/public/app/plugins/datasource/postgres/specs/datasource.jest.ts b/public/app/plugins/datasource/postgres/specs/datasource.jest.ts index d00e036a5148..87e4c3483794 100644 --- a/public/app/plugins/datasource/postgres/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/postgres/specs/datasource.jest.ts @@ -9,10 +9,13 @@ describe('PostgreSQLDatasource', function() { let backendSrv = {}; let templateSrv = new TemplateSrvStub(); let ctx = { - ds: new PostgresDatasource(instanceSettings, backendSrv, {}, templateSrv), - backendSrv + backendSrv, }; + beforeEach(() => { + ctx.ds = new PostgresDatasource(instanceSettings, backendSrv, {}, templateSrv); + }); + describe('When performing annotationQuery', function() { let results; @@ -48,7 +51,7 @@ describe('PostgreSQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { return Promise.resolve({ data: response, status: 200 }); }); ctx.ds.annotationQuery(options).then(function(data) { @@ -91,7 +94,7 @@ describe('PostgreSQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { return Promise.resolve({ data: response, status: 200 }); }); ctx.ds.metricFindQuery(query).then(function(data) { @@ -127,7 +130,7 @@ describe('PostgreSQLDatasource', function() { }; beforeEach(function() { - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + ctx.backendSrv.datasourceRequest = jest.fn(options => { return Promise.resolve({ data: response, status: 200 }); }); ctx.ds.metricFindQuery(query).then(function(data) { @@ -165,7 +168,7 @@ describe('PostgreSQLDatasource', function() { }; beforeEach(() => { - ctx.backendSrv.datasourceRequest = jest.fn((options) => { + 
ctx.backendSrv.datasourceRequest = jest.fn(options => { return Promise.resolve({ data: response, status: 200 }); }); ctx.ds.metricFindQuery(query).then(function(data) { From 74bf80962d395ce2280434bf52ea4a44626f934c Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 4 Jul 2018 11:16:31 +0200 Subject: [PATCH 083/263] Add mock to test files --- .../cloudwatch/specs/datasource.jest.ts | 15 ++++++++++++--- .../datasource/mysql/specs/datasource.jest.ts | 7 ++++--- .../postgres/specs/datasource.jest.ts | 5 +++-- public/test/specs/helpers.ts | 18 ------------------ 4 files changed, 19 insertions(+), 26 deletions(-) diff --git a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts index 4e479e6a83e8..fbeaaad8e676 100644 --- a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts @@ -1,15 +1,24 @@ import '../datasource'; -//import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; -import { TemplateSrvStub, jestTimeSrvStub } from 'test/specs/helpers'; +import { TemplateSrvStub } from 'test/specs/helpers'; import CloudWatchDatasource from '../datasource'; import 'app/features/dashboard/time_srv'; +import * as dateMath from 'app/core/utils/datemath'; describe('CloudWatchDatasource', function() { let instanceSettings = { jsonData: { defaultRegion: 'us-east-1', access: 'proxy' }, }; let templateSrv = new TemplateSrvStub(); - let timeSrv = new jestTimeSrvStub(); + + let timeSrv = { + time: { from: 'now-1h', to: 'now' }, + timeRange: jest.fn(() => { + return { + from: dateMath.parse(timeSrv.time.from, false), + to: dateMath.parse(timeSrv.time.to, true), + }; + }), + }; let backendSrv = {}; let ctx = { backendSrv, diff --git a/public/app/plugins/datasource/mysql/specs/datasource.jest.ts b/public/app/plugins/datasource/mysql/specs/datasource.jest.ts index 5a97e4b441d0..be33f5f88589 100644 
--- a/public/app/plugins/datasource/mysql/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/mysql/specs/datasource.jest.ts @@ -1,13 +1,14 @@ -//import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; import moment from 'moment'; -import { TemplateSrvStub } from 'test/specs/helpers'; import { MysqlDatasource } from '../datasource'; import { CustomVariable } from 'app/features/templating/custom_variable'; describe('MySQLDatasource', function() { let instanceSettings = { name: 'mysql' }; let backendSrv = {}; - let templateSrv = new TemplateSrvStub(); + let templateSrv = { + replace: jest.fn(text => text), + }; + let ctx = { backendSrv, }; diff --git a/public/app/plugins/datasource/postgres/specs/datasource.jest.ts b/public/app/plugins/datasource/postgres/specs/datasource.jest.ts index 87e4c3483794..107cd76e6c53 100644 --- a/public/app/plugins/datasource/postgres/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/postgres/specs/datasource.jest.ts @@ -1,13 +1,14 @@ import moment from 'moment'; import { PostgresDatasource } from '../datasource'; import { CustomVariable } from 'app/features/templating/custom_variable'; -import { TemplateSrvStub } from 'test/specs/helpers'; describe('PostgreSQLDatasource', function() { let instanceSettings = { name: 'postgresql' }; let backendSrv = {}; - let templateSrv = new TemplateSrvStub(); + let templateSrv = { + replace: jest.fn(text => text), + }; let ctx = { backendSrv, }; diff --git a/public/test/specs/helpers.ts b/public/test/specs/helpers.ts index 811587b2c912..677419f3f756 100644 --- a/public/test/specs/helpers.ts +++ b/public/test/specs/helpers.ts @@ -195,24 +195,6 @@ export function TemplateSrvStub() { }; } -export function jestTimeSrvStub() { - this.init = jest.fn(); - this.time = { from: 'now-1h', to: 'now' }; - this.timeRange = jest.fn(parse => { - if (parse === false) { - return this.time; - } - return { - from: dateMath.parse(this.time.from, false), - to: 
dateMath.parse(this.time.to, true), - }; - }); - this.replace = jest.fn(target => target); - this.setTime = jest.fn(time => { - this.time = time; - }); -} - var allDeps = { ContextSrvStub, TemplateSrvStub, From 240cf63ba6ad8c07a137d1c01f5034ee90422379 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 4 Jul 2018 20:07:43 +0200 Subject: [PATCH 084/263] changelog: add notes about closing #11618 #11619 --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ddc9497dbc0..0177546b643d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,8 +12,10 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) +* **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) + # 5.2.1 (2018-06-29) ### Minor From 50a522d418406955d82d0e08c21d28a4f0069fea Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 5 Jul 2018 13:01:10 +0200 Subject: [PATCH 085/263] fix caret for help button is ds http settings --- public/app/features/plugins/partials/ds_http_settings.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/public/app/features/plugins/partials/ds_http_settings.html b/public/app/features/plugins/partials/ds_http_settings.html index 3af185d862ce..6d014af567c5 100644 --- a/public/app/features/plugins/partials/ds_http_settings.html +++ b/public/app/features/plugins/partials/ds_http_settings.html @@ -32,8 +32,8 @@

    HTTP

    From 36f08994ccd3f2275472ef39a1714a7c50bba1c5 Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Thu, 5 Jul 2018 16:12:03 +0300 Subject: [PATCH 086/263] prometheus heatmap: fix unhandled error when some points are missing --- .../prometheus/result_transformer.ts | 18 ++++++++--- .../specs/result_transformer.jest.ts | 30 +++++++++++++++++++ .../app/plugins/panel/heatmap/heatmap_ctrl.ts | 4 +++ 3 files changed, 48 insertions(+), 4 deletions(-) diff --git a/public/app/plugins/datasource/prometheus/result_transformer.ts b/public/app/plugins/datasource/prometheus/result_transformer.ts index 7f5430bf7d6d..b6d8a32af5f5 100644 --- a/public/app/plugins/datasource/prometheus/result_transformer.ts +++ b/public/app/plugins/datasource/prometheus/result_transformer.ts @@ -28,15 +28,20 @@ export class ResultTransformer { } } - transformMetricData(md, options, start, end) { + transformMetricData(metricData, options, start, end) { let dps = [], metricLabel = null; - metricLabel = this.createMetricLabel(md.metric, options); + metricLabel = this.createMetricLabel(metricData.metric, options); const stepMs = parseInt(options.step) * 1000; let baseTimestamp = start * 1000; - for (let value of md.values) { + + if (metricData.values === undefined) { + throw new Error('Prometheus heatmap error: data should be a time series'); + } + + for (let value of metricData.values) { let dp_value = parseFloat(value[1]); if (_.isNaN(dp_value)) { dp_value = null; @@ -164,8 +169,13 @@ export class ResultTransformer { for (let i = seriesList.length - 1; i > 0; i--) { let topSeries = seriesList[i].datapoints; let bottomSeries = seriesList[i - 1].datapoints; + if (!topSeries || !bottomSeries) { + throw new Error('Prometheus heatmap transform error: data should be a time series'); + } + for (let j = 0; j < topSeries.length; j++) { - topSeries[j][0] -= bottomSeries[j][0]; + const bottomPoint = bottomSeries[j] || [0]; + topSeries[j][0] -= bottomPoint[0]; } } diff --git 
a/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts b/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts index b94cca790597..c0f2609f5b4a 100644 --- a/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts @@ -126,6 +126,36 @@ describe('Prometheus Result Transformer', () => { { target: '3', datapoints: [[10, 1445000010000], [0, 1445000020000], [10, 1445000030000]] }, ]); }); + + it('should handle missing datapoints', () => { + const seriesList = [ + { datapoints: [[1, 1000], [2, 2000]] }, + { datapoints: [[2, 1000], [5, 2000], [1, 3000]] }, + { datapoints: [[3, 1000], [7, 2000]] }, + ]; + const expected = [ + { datapoints: [[1, 1000], [2, 2000]] }, + { datapoints: [[1, 1000], [3, 2000], [1, 3000]] }, + { datapoints: [[1, 1000], [2, 2000]] }, + ]; + const result = ctx.resultTransformer.transformToHistogramOverTime(seriesList); + expect(result).toEqual(expected); + }); + + it('should throw error when data in wrong format', () => { + const seriesList = [{ rows: [] }, { datapoints: [] }]; + expect(() => { + ctx.resultTransformer.transformToHistogramOverTime(seriesList); + }).toThrow(); + }); + + it('should throw error when prometheus returned non-timeseries', () => { + // should be { metric: {}, values: [] } for timeseries + const metricData = { metric: {}, value: [] }; + expect(() => { + ctx.resultTransformer.transformMetricData(metricData, { step: 1 }, 1000, 2000); + }).toThrow(); + }); }); describe('When resultFormat is time series', () => { diff --git a/public/app/plugins/panel/heatmap/heatmap_ctrl.ts b/public/app/plugins/panel/heatmap/heatmap_ctrl.ts index 11fbad47b99d..31a5afa630e2 100644 --- a/public/app/plugins/panel/heatmap/heatmap_ctrl.ts +++ b/public/app/plugins/panel/heatmap/heatmap_ctrl.ts @@ -302,6 +302,10 @@ export class HeatmapCtrl extends MetricsPanelCtrl { } seriesHandler(seriesData) { + if 
(seriesData.datapoints === undefined) { + throw new Error('Heatmap error: data should be a time series'); + } + let series = new TimeSeries({ datapoints: seriesData.datapoints, alias: seriesData.target, From 5e4d6958d6258ee396cc03ff3479c982d44e0ace Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 6 Jul 2018 11:54:37 +0200 Subject: [PATCH 087/263] fix links not updating after changing variables --- public/app/features/dashlinks/module.ts | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/public/app/features/dashlinks/module.ts b/public/app/features/dashlinks/module.ts index f9482049c406..380144dbcd53 100644 --- a/public/app/features/dashlinks/module.ts +++ b/public/app/features/dashlinks/module.ts @@ -41,20 +41,20 @@ function dashLink($compile, $sanitize, linkSrv) { elem.html(template); $compile(elem.contents())(scope); - var anchor = elem.find('a'); - var icon = elem.find('i'); - var span = elem.find('span'); - function update() { var linkInfo = linkSrv.getAnchorInfo(link); + + const anchor = elem.find('a'); + const span = elem.find('span'); span.text(linkInfo.title); + if (!link.asDropdown) { anchor.attr('href', linkInfo.href); sanitizeAnchor(); } - elem.find('a').attr('data-placement', 'bottom'); + anchor.attr('data-placement', 'bottom'); // tooltip - elem.find('a').tooltip({ + anchor.tooltip({ title: $sanitize(scope.link.tooltip), html: true, container: 'body', @@ -62,12 +62,13 @@ function dashLink($compile, $sanitize, linkSrv) { } function sanitizeAnchor() { + const anchor = elem.find('a'); const anchorSanitized = $sanitize(anchor.parent().html()); anchor.parent().html(anchorSanitized); } - icon.attr('class', 'fa fa-fw ' + scope.link.icon); - anchor.attr('target', scope.link.target); + elem.find('i').attr('class', 'fa fa-fw ' + scope.link.icon); + elem.find('a').attr('target', scope.link.target); // fix for menus on the far right if (link.asDropdown && scope.$last) { From af434df5a37fcc090a0b7f38d333a87fe2558bb4 Mon 
Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 6 Jul 2018 12:35:19 +0200 Subject: [PATCH 088/263] changelog: add notes about closing #12506 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0177546b643d..cb0b95c211fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) +* **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) # 5.2.1 (2018-06-29) From cbdf6ef355d6cae33c4d770dcc1a676f53028980 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 6 Jul 2018 12:53:10 +0200 Subject: [PATCH 089/263] changelog: add notes about closing #12506 [skip ci] --- CHANGELOG.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cb0b95c211fd..e478315f1a56 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,8 +14,12 @@ * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) 
[#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) -* **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) +# 5.2.2 (unreleased) + +### Minor + +* **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) # 5.2.1 (2018-06-29) From f2980bb9785243ef999eb7b9ce599b9c861af489 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 6 Jul 2018 12:55:12 +0200 Subject: [PATCH 090/263] changelog: add notes about closing #12484 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e478315f1a56..2c2b4f8cd335 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ * **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps) * **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) +* **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484) * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx 
[@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) From 7ddcaf22d5a55d4178f16c9157bf68079a9d37ad Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Fri, 6 Jul 2018 15:32:08 +0200 Subject: [PATCH 091/263] Fix datasource sorting with template variables - fixes sorting when template variables are present - simplified existing test cases - added test to cover variable usage in datasource service --- public/app/features/plugins/datasource_srv.ts | 6 ++- .../plugins/specs/datasource_srv.jest.ts | 41 +++++++++++++------ 2 files changed, 32 insertions(+), 15 deletions(-) diff --git a/public/app/features/plugins/datasource_srv.ts b/public/app/features/plugins/datasource_srv.ts index b5e0316163c8..bff6f8b9f6a0 100644 --- a/public/app/features/plugins/datasource_srv.ts +++ b/public/app/features/plugins/datasource_srv.ts @@ -142,10 +142,12 @@ export class DatasourceSrv { var ds = config.datasources[first]; if (ds) { + const key = `$${variable.name}`; list.push({ - name: '$' + variable.name, - value: '$' + variable.name, + name: key, + value: key, meta: ds.meta, + sort: key, }); } } diff --git a/public/app/features/plugins/specs/datasource_srv.jest.ts b/public/app/features/plugins/specs/datasource_srv.jest.ts index f261c4e22498..5458662ef9b6 100644 --- a/public/app/features/plugins/specs/datasource_srv.jest.ts +++ b/public/app/features/plugins/specs/datasource_srv.jest.ts @@ -2,8 +2,21 @@ import config from 'app/core/config'; import 'app/features/plugins/datasource_srv'; import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; +// Datasource variable $datasource with current value 'BBB' +const templateSrv = { + variables: [ + { + type: 'datasource', + name: 'datasource', + current: { + 
value: 'BBB', + }, + }, + ], +}; + describe('datasource_srv', function() { - let _datasourceSrv = new DatasourceSrv({}, {}, {}, {}); + let _datasourceSrv = new DatasourceSrv({}, {}, {}, templateSrv); let metricSources; describe('when loading metric sources', () => { @@ -35,25 +48,27 @@ describe('datasource_srv', function() { }; beforeEach(() => { config.datasources = unsortedDatasources; - metricSources = _datasourceSrv.getMetricSources({ skipVariables: true }); + metricSources = _datasourceSrv.getMetricSources({}); + config.defaultDatasource = 'BBB'; }); it('should return a list of sources sorted case insensitively with builtin sources last', () => { - expect(metricSources[0].name).toBe('aaa'); - expect(metricSources[1].name).toBe('BBB'); - expect(metricSources[2].name).toBe('mmm'); - expect(metricSources[3].name).toBe('ZZZ'); - expect(metricSources[4].name).toBe('--Grafana--'); - expect(metricSources[5].name).toBe('--Mixed--'); + expect(metricSources[1].name).toBe('aaa'); + expect(metricSources[2].name).toBe('BBB'); + expect(metricSources[3].name).toBe('mmm'); + expect(metricSources[4].name).toBe('ZZZ'); + expect(metricSources[5].name).toBe('--Grafana--'); + expect(metricSources[6].name).toBe('--Mixed--'); }); - beforeEach(() => { - config.defaultDatasource = 'BBB'; + it('should set default data source', () => { + expect(metricSources[3].name).toBe('default'); + expect(metricSources[3].sort).toBe('BBB'); }); - it('should set default data source', () => { - expect(metricSources[2].name).toBe('default'); - expect(metricSources[2].sort).toBe('BBB'); + it('should set default inject the variable datasources', () => { + expect(metricSources[0].name).toBe('$datasource'); + expect(metricSources[0].sort).toBe('$datasource'); }); }); }); From 922371ba76738670d44a44cfcf634d3a0a9ade4c Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 6 Jul 2018 17:48:39 +0200 Subject: [PATCH 092/263] changelog: add notes about closing #12379 [skip ci] --- CHANGELOG.md | 1 + 1 file 
changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2c2b4f8cd335..1f9cc951e344 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ ### Minor +* **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) # 5.2.1 (2018-06-29) From 645974ec8dd59e876cb73280072561d1b07280fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Sat, 7 Jul 2018 10:25:41 +0200 Subject: [PATCH 093/263] ux: minor fix/tweak to inactive view mode, think logo should be visible & fixes dashboard title alignment --- public/sass/components/_view_states.scss | 4 ---- 1 file changed, 4 deletions(-) diff --git a/public/sass/components/_view_states.scss b/public/sass/components/_view_states.scss index c14590b4ec9d..5e9d037e9242 100644 --- a/public/sass/components/_view_states.scss +++ b/public/sass/components/_view_states.scss @@ -36,10 +36,6 @@ } } - .sidemenu { - display: none; - } - .gf-timepicker-nav-btn { transform: translate3d(40px, 0, 0); } From b464a28cca12b3f786f91cc425a213e60262a40d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Sat, 7 Jul 2018 14:40:17 +0200 Subject: [PATCH 094/263] devenv: updated devenv provision scripts --- .gitignore | 1 + .../bulk-dashboards.yaml | 0 .../bulkdash.jsonnet | 0 devenv/dashboards.yaml | 9 ++++++ .../dev-dashboards/dev-dashboards.yaml | 9 ------ .../default/default.yaml => datasources.yaml} | 28 +++++++++---------- .../dev-dashboards/dashboard_with_rows.json | 0 devenv/setup.sh | 25 +++++++---------- 8 files changed, 34 insertions(+), 38 deletions(-) rename devenv/{dashboards/bulk-testing => bulk-dashboards}/bulk-dashboards.yaml (100%) rename devenv/{dashboards/bulk-testing => bulk-dashboards}/bulkdash.jsonnet (100%) create mode 100644 devenv/dashboards.yaml delete mode 100644 
devenv/dashboards/dev-dashboards/dev-dashboards.yaml rename devenv/{datasources/default/default.yaml => datasources.yaml} (81%) rename devenv/{dashboards => }/dev-dashboards/dashboard_with_rows.json (100%) diff --git a/.gitignore b/.gitignore index accc24d84cdc..11df66360d9c 100644 --- a/.gitignore +++ b/.gitignore @@ -43,6 +43,7 @@ fig.yml docker-compose.yml docker-compose.yaml /conf/provisioning/**/custom.yaml +/conf/provisioning/**/dev.yaml /conf/ldap_dev.toml profile.cov /grafana diff --git a/devenv/dashboards/bulk-testing/bulk-dashboards.yaml b/devenv/bulk-dashboards/bulk-dashboards.yaml similarity index 100% rename from devenv/dashboards/bulk-testing/bulk-dashboards.yaml rename to devenv/bulk-dashboards/bulk-dashboards.yaml diff --git a/devenv/dashboards/bulk-testing/bulkdash.jsonnet b/devenv/bulk-dashboards/bulkdash.jsonnet similarity index 100% rename from devenv/dashboards/bulk-testing/bulkdash.jsonnet rename to devenv/bulk-dashboards/bulkdash.jsonnet diff --git a/devenv/dashboards.yaml b/devenv/dashboards.yaml new file mode 100644 index 000000000000..226c1a8b335d --- /dev/null +++ b/devenv/dashboards.yaml @@ -0,0 +1,9 @@ +apiVersion: 1 + +providers: + - name: 'gdev dashboards' + folder: 'gdev dashboards' + type: file + options: + path: devenv/dev-dashboards + diff --git a/devenv/dashboards/dev-dashboards/dev-dashboards.yaml b/devenv/dashboards/dev-dashboards/dev-dashboards.yaml deleted file mode 100644 index 343910de7387..000000000000 --- a/devenv/dashboards/dev-dashboards/dev-dashboards.yaml +++ /dev/null @@ -1,9 +0,0 @@ -apiVersion: 1 - -providers: - - name: 'dev dashboards' - folder: 'dev dashboards' - type: file - options: - path: devenv/dashboards/dev-dashboards - diff --git a/devenv/datasources/default/default.yaml b/devenv/datasources.yaml similarity index 81% rename from devenv/datasources/default/default.yaml rename to devenv/datasources.yaml index dc2310f15aae..e93c0217f270 100644 --- a/devenv/datasources/default/default.yaml +++ 
b/devenv/datasources.yaml @@ -1,38 +1,38 @@ apiVersion: 1 datasources: - - name: Graphite + - name: gdev-graphite type: graphite access: proxy url: http://localhost:8080 jsonData: graphiteVersion: "1.1" - - - name: Prometheus + + - name: gdev-prometheus type: prometheus access: proxy isDefault: true url: http://localhost:9090 - - - name: InfluxDB + + - name: gdev-influxdb type: influxdb access: proxy database: site user: grafana password: grafana url: http://localhost:8086 - jsonData: + jsonData: timeInterval: "15s" - - name: OpenTsdb + - name: gdev-opentsdb type: opentsdb access: proxy url: http://localhost:4242 - jsonData: + jsonData: tsdbResolution: 1 tsdbVersion: 1 - - name: Elastic + - name: gdev-elasticsearch-metrics type: elasticsearch access: proxy database: "[metrics-]YYYY.MM.DD" @@ -40,22 +40,22 @@ datasources: jsonData: interval: Daily timeField: "@timestamp" - - - name: MySQL + + - name: gdev-mysql type: mysql url: localhost:3306 database: grafana user: grafana password: password - - name: MSSQL + - name: gdev-mssql type: mssql url: localhost:1433 database: grafana user: grafana password: "Password!" 
- - name: Postgres + - name: gdev-postgres type: postgres url: localhost:5432 database: grafana @@ -64,7 +64,7 @@ datasources: jsonData: sslmode: "disable" - - name: Cloudwatch + - name: gdev-cloudwatch type: cloudwatch editable: true jsonData: diff --git a/devenv/dashboards/dev-dashboards/dashboard_with_rows.json b/devenv/dev-dashboards/dashboard_with_rows.json similarity index 100% rename from devenv/dashboards/dev-dashboards/dashboard_with_rows.json rename to devenv/dev-dashboards/dashboard_with_rows.json diff --git a/devenv/setup.sh b/devenv/setup.sh index 0900b57e5839..78dbfc1a366e 100755 --- a/devenv/setup.sh +++ b/devenv/setup.sh @@ -23,41 +23,36 @@ requiresJsonnet() { } defaultDashboards() { - requiresJsonnet - - ln -s -f -r ./dashboards/dev-dashboards/dev-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml + ln -s -f ../../../devenv/dashboards.yaml ../conf/provisioning/dashboards/dev.yaml } defaultDatasources() { echo "setting up all default datasources using provisioning" - ln -s -f -r ./datasources/default/default.yaml ../conf/provisioning/datasources/custom.yaml + ln -s -f ../../../devenv/datasources.yaml ../conf/provisioning/datasources/dev.yaml } usage() { - echo -e "install.sh\n\tThis script installs my basic setup for a debian laptop\n" + echo -e "install.sh\n\tThis script setups dev provision for datasources and dashboards" echo "Usage:" echo " bulk-dashboards - create and provisioning 400 dashboards" - echo " default-datasources - provisiong all core datasources" + echo " no args - provisiong core datasources and dev dashboards" } main() { local cmd=$1 - if [[ -z "$cmd" ]]; then - usage - exit 1 - fi - if [[ $cmd == "bulk-dashboards" ]]; then bulkDashboard - elif [[ $cmd == "default-datasources" ]]; then - defaultDatasources - elif [[ $cmd == "default-dashboards" ]]; then - defaultDashboards else + defaultDashboards + defaultDatasources + fi + + if [[ -z "$cmd" ]]; then usage fi + } main "$@" From dca22fd151fd68cdfe4aaafb140470542f11d86a 
Mon Sep 17 00:00:00 2001 From: Ryan McKinley Date: Sun, 8 Jul 2018 15:48:05 +0100 Subject: [PATCH 095/263] Tabs to spaces in tslint (#12529) * tabs to spaces testing commit permisions :) * revert * tabs to spaces --- tslint.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tslint.json b/tslint.json index e7a512957013..22e123e0364a 100644 --- a/tslint.json +++ b/tslint.json @@ -2,7 +2,7 @@ "rules": { "no-string-throw": true, "no-unused-expression": true, - "no-unused-variable": false, + "no-unused-variable": false, "no-use-before-declare": false, "no-duplicate-variable": true, "curly": true, From f53e1661145732b6af565e00ec6c215c2d9bc0b2 Mon Sep 17 00:00:00 2001 From: Ryan McKinley Date: Sun, 8 Jul 2018 21:06:08 +0100 Subject: [PATCH 096/263] Update lodash/moment version (#12532) * tabs to spaces testing commit permisions :) * revert * Update lodash and moment --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 4f2220abbaec..a43b2adc5be1 100644 --- a/package.json +++ b/package.json @@ -154,12 +154,12 @@ "file-saver": "^1.3.3", "immutable": "^3.8.2", "jquery": "^3.2.1", - "lodash": "^4.17.4", + "lodash": "^4.17.10", "mini-css-extract-plugin": "^0.4.0", "mobx": "^3.4.1", "mobx-react": "^4.3.5", "mobx-state-tree": "^1.3.1", - "moment": "^2.18.1", + "moment": "^2.22.2", "mousetrap": "^1.6.0", "mousetrap-global-bind": "^1.1.0", "optimize-css-assets-webpack-plugin": "^4.0.2", From 664944980a86c7082ce20e007a8789e949543453 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Mon, 16 Apr 2018 15:27:25 +0900 Subject: [PATCH 097/263] update aws-sdk-go --- Gopkg.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Gopkg.toml b/Gopkg.toml index 1768059f0b86..6c91ec372211 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -36,7 +36,7 @@ ignored = [ [[constraint]] name = "github.com/aws/aws-sdk-go" - version = "1.12.65" + version = "1.13.56" [[constraint]] branch = "master" From 
077cf9a343dcc2f0d54607776cb998404870d565 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Mon, 16 Apr 2018 15:32:39 +0900 Subject: [PATCH 098/263] dep ensure --- Gopkg.lock | 15 +- .../aws/aws-sdk-go/aws/client/client.go | 4 +- .../aws/aws-sdk-go/aws/client/logger.go | 102 ++- .../aws/client/metadata/client_info.go | 1 + .../aws-sdk-go/aws/credentials/credentials.go | 18 +- .../github.com/aws/aws-sdk-go/aws/csm/doc.go | 46 + .../aws/aws-sdk-go/aws/csm/enable.go | 67 ++ .../aws/aws-sdk-go/aws/csm/metric.go | 51 ++ .../aws/aws-sdk-go/aws/csm/metricChan.go | 54 ++ .../aws/aws-sdk-go/aws/csm/reporter.go | 230 +++++ .../aws/aws-sdk-go/aws/endpoints/defaults.go | 75 +- .../github.com/aws/aws-sdk-go/aws/logger.go | 6 + .../aws/aws-sdk-go/aws/request/handlers.go | 18 + .../aws/aws-sdk-go/aws/request/request.go | 9 +- .../aws/aws-sdk-go/aws/request/request_1_7.go | 2 +- .../aws/aws-sdk-go/aws/request/request_1_8.go | 2 +- .../aws/request/request_pagination.go | 15 +- .../aws/aws-sdk-go/aws/session/env_config.go | 20 + .../aws/aws-sdk-go/aws/session/session.go | 26 +- .../aws/aws-sdk-go/aws/signer/v4/v4.go | 13 +- .../github.com/aws/aws-sdk-go/aws/version.go | 2 +- .../private/protocol/eventstream/debug.go | 144 ++++ .../private/protocol/eventstream/decode.go | 199 +++++ .../private/protocol/eventstream/encode.go | 114 +++ .../private/protocol/eventstream/error.go | 23 + .../eventstream/eventstreamapi/api.go | 160 ++++ .../eventstream/eventstreamapi/error.go | 24 + .../private/protocol/eventstream/header.go | 166 ++++ .../protocol/eventstream/header_value.go | 501 +++++++++++ .../private/protocol/eventstream/message.go | 103 +++ .../aws-sdk-go/private/protocol/payload.go | 81 ++ .../aws-sdk-go/private/protocol/rest/build.go | 8 +- .../private/protocol/rest/unmarshal.go | 2 +- .../aws-sdk-go/service/cloudwatch/service.go | 6 +- .../aws/aws-sdk-go/service/ec2/api.go | 94 ++- .../aws/aws-sdk-go/service/ec2/service.go | 6 +- .../aws/aws-sdk-go/service/s3/api.go | 796 
++++++++++++++++++ .../aws/aws-sdk-go/service/s3/service.go | 8 +- .../aws/aws-sdk-go/service/sts/service.go | 6 +- .../shurcooL/sanitized_anchor_name/LICENSE | 21 + .../shurcooL/sanitized_anchor_name/main.go | 29 + 41 files changed, 3183 insertions(+), 84 deletions(-) create mode 100644 vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go create mode 100644 vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go create mode 100644 vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go create mode 100644 vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go create mode 100644 vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go create mode 100644 vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go create mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE create mode 100644 vendor/github.com/shurcooL/sanitized_anchor_name/main.go diff --git a/Gopkg.lock b/Gopkg.lock index 5acaf2a542ce..6f08e208ecd0 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -32,6 +32,7 @@ "aws/credentials/ec2rolecreds", "aws/credentials/endpointcreds", "aws/credentials/stscreds", + "aws/csm", "aws/defaults", "aws/ec2metadata", "aws/endpoints", @@ 
-43,6 +44,8 @@ "internal/shareddefaults", "private/protocol", "private/protocol/ec2query", + "private/protocol/eventstream", + "private/protocol/eventstream/eventstreamapi", "private/protocol/query", "private/protocol/query/queryutil", "private/protocol/rest", @@ -54,8 +57,8 @@ "service/s3", "service/sts" ] - revision = "c7cd1ebe87257cde9b65112fc876b0339ea0ac30" - version = "v1.13.49" + revision = "fde4ded7becdeae4d26bf1212916aabba79349b4" + version = "v1.14.12" [[projects]] branch = "master" @@ -424,6 +427,12 @@ revision = "1744e2970ca51c86172c8190fadad617561ed6e7" version = "v1.0.0" +[[projects]] + branch = "master" + name = "github.com/shurcooL/sanitized_anchor_name" + packages = ["."] + revision = "86672fcb3f950f35f2e675df2240550f2a50762f" + [[projects]] name = "github.com/smartystreets/assertions" packages = [ @@ -670,6 +679,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "85cc057e0cc074ab5b43bd620772d63d51e07b04e8782fcfe55e6929d2fc40f7" + inputs-digest = "cb8e7fd81f23ec987fc4d5dd9d31ae0f1164bc2f30cbea2fe86e0d97dd945beb" solver-name = "gps-cdcl" solver-version = 1 diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/client.go b/vendor/github.com/aws/aws-sdk-go/aws/client/client.go index 3271a18e80e1..212fe25e71e1 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/client/client.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/client.go @@ -91,6 +91,6 @@ func (c *Client) AddDebugHandlers() { return } - c.Handlers.Send.PushFrontNamed(request.NamedHandler{Name: "awssdk.client.LogRequest", Fn: logRequest}) - c.Handlers.Send.PushBackNamed(request.NamedHandler{Name: "awssdk.client.LogResponse", Fn: logResponse}) + c.Handlers.Send.PushFrontNamed(LogHTTPRequestHandler) + c.Handlers.Send.PushBackNamed(LogHTTPResponseHandler) } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go b/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go index e223c54cc6c1..ce9fb896d943 100644 --- 
a/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/logger.go @@ -44,12 +44,22 @@ func (reader *teeReaderCloser) Close() error { return reader.Source.Close() } +// LogHTTPRequestHandler is a SDK request handler to log the HTTP request sent +// to a service. Will include the HTTP request body if the LogLevel of the +// request matches LogDebugWithHTTPBody. +var LogHTTPRequestHandler = request.NamedHandler{ + Name: "awssdk.client.LogRequest", + Fn: logRequest, +} + func logRequest(r *request.Request) { logBody := r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) bodySeekable := aws.IsReaderSeekable(r.Body) - dumpedBody, err := httputil.DumpRequestOut(r.HTTPRequest, logBody) + + b, err := httputil.DumpRequestOut(r.HTTPRequest, logBody) if err != nil { - r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, r.ClientInfo.ServiceName, r.Operation.Name, err)) + r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) return } @@ -63,7 +73,28 @@ func logRequest(r *request.Request) { r.ResetBody() } - r.Config.Logger.Log(fmt.Sprintf(logReqMsg, r.ClientInfo.ServiceName, r.Operation.Name, string(dumpedBody))) + r.Config.Logger.Log(fmt.Sprintf(logReqMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) +} + +// LogHTTPRequestHeaderHandler is a SDK request handler to log the HTTP request sent +// to a service. Will only log the HTTP request's headers. The request payload +// will not be read. 
+var LogHTTPRequestHeaderHandler = request.NamedHandler{ + Name: "awssdk.client.LogRequestHeader", + Fn: logRequestHeader, +} + +func logRequestHeader(r *request.Request) { + b, err := httputil.DumpRequestOut(r.HTTPRequest, false) + if err != nil { + r.Config.Logger.Log(fmt.Sprintf(logReqErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) + return + } + + r.Config.Logger.Log(fmt.Sprintf(logReqMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) } const logRespMsg = `DEBUG: Response %s/%s Details: @@ -76,27 +107,44 @@ const logRespErrMsg = `DEBUG ERROR: Response %s/%s: %s -----------------------------------------------------` +// LogHTTPResponseHandler is a SDK request handler to log the HTTP response +// received from a service. Will include the HTTP response body if the LogLevel +// of the request matches LogDebugWithHTTPBody. +var LogHTTPResponseHandler = request.NamedHandler{ + Name: "awssdk.client.LogResponse", + Fn: logResponse, +} + func logResponse(r *request.Request) { lw := &logWriter{r.Config.Logger, bytes.NewBuffer(nil)} - r.HTTPResponse.Body = &teeReaderCloser{ - Reader: io.TeeReader(r.HTTPResponse.Body, lw), - Source: r.HTTPResponse.Body, + + logBody := r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) + if logBody { + r.HTTPResponse.Body = &teeReaderCloser{ + Reader: io.TeeReader(r.HTTPResponse.Body, lw), + Source: r.HTTPResponse.Body, + } } handlerFn := func(req *request.Request) { - body, err := httputil.DumpResponse(req.HTTPResponse, false) + b, err := httputil.DumpResponse(req.HTTPResponse, false) if err != nil { - lw.Logger.Log(fmt.Sprintf(logRespErrMsg, req.ClientInfo.ServiceName, req.Operation.Name, err)) + lw.Logger.Log(fmt.Sprintf(logRespErrMsg, + req.ClientInfo.ServiceName, req.Operation.Name, err)) return } - b, err := ioutil.ReadAll(lw.buf) - if err != nil { - lw.Logger.Log(fmt.Sprintf(logRespErrMsg, req.ClientInfo.ServiceName, req.Operation.Name, err)) - return - } - lw.Logger.Log(fmt.Sprintf(logRespMsg, 
req.ClientInfo.ServiceName, req.Operation.Name, string(body))) - if req.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody) { + lw.Logger.Log(fmt.Sprintf(logRespMsg, + req.ClientInfo.ServiceName, req.Operation.Name, string(b))) + + if logBody { + b, err := ioutil.ReadAll(lw.buf) + if err != nil { + lw.Logger.Log(fmt.Sprintf(logRespErrMsg, + req.ClientInfo.ServiceName, req.Operation.Name, err)) + return + } + lw.Logger.Log(string(b)) } } @@ -110,3 +158,27 @@ func logResponse(r *request.Request) { Name: handlerName, Fn: handlerFn, }) } + +// LogHTTPResponseHeaderHandler is a SDK request handler to log the HTTP +// response received from a service. Will only log the HTTP response's headers. +// The response payload will not be read. +var LogHTTPResponseHeaderHandler = request.NamedHandler{ + Name: "awssdk.client.LogResponseHeader", + Fn: logResponseHeader, +} + +func logResponseHeader(r *request.Request) { + if r.Config.Logger == nil { + return + } + + b, err := httputil.DumpResponse(r.HTTPResponse, false) + if err != nil { + r.Config.Logger.Log(fmt.Sprintf(logRespErrMsg, + r.ClientInfo.ServiceName, r.Operation.Name, err)) + return + } + + r.Config.Logger.Log(fmt.Sprintf(logRespMsg, + r.ClientInfo.ServiceName, r.Operation.Name, string(b))) +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go b/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go index 4778056ddfda..920e9fddf870 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/client/metadata/client_info.go @@ -3,6 +3,7 @@ package metadata // ClientInfo wraps immutable data from the client.Client structure. 
type ClientInfo struct { ServiceName string + ServiceID string APIVersion string Endpoint string SigningName string diff --git a/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go b/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go index 42416fc2f0fc..ed086992f62f 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/credentials/credentials.go @@ -178,7 +178,8 @@ func (e *Expiry) IsExpired() bool { type Credentials struct { creds Value forceRefresh bool - m sync.Mutex + + m sync.RWMutex provider Provider } @@ -201,6 +202,17 @@ func NewCredentials(provider Provider) *Credentials { // If Credentials.Expire() was called the credentials Value will be force // expired, and the next call to Get() will cause them to be refreshed. func (c *Credentials) Get() (Value, error) { + // Check the cached credentials first with just the read lock. + c.m.RLock() + if !c.isExpired() { + creds := c.creds + c.m.RUnlock() + return creds, nil + } + c.m.RUnlock() + + // Credentials are expired need to retrieve the credentials taking the full + // lock. c.m.Lock() defer c.m.Unlock() @@ -234,8 +246,8 @@ func (c *Credentials) Expire() { // If the Credentials were forced to be expired with Expire() this will // reflect that override. func (c *Credentials) IsExpired() bool { - c.m.Lock() - defer c.m.Unlock() + c.m.RLock() + defer c.m.RUnlock() return c.isExpired() } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go new file mode 100644 index 000000000000..152d785b362b --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/doc.go @@ -0,0 +1,46 @@ +// Package csm provides Client Side Monitoring (CSM) which enables sending metrics +// via UDP connection. Using the Start function will enable the reporting of +// metrics on a given port. If Start is called, with different parameters, again, +// a panic will occur. 
+// +// Pause can be called to pause any metrics publishing on a given port. Sessions +// that have had their handlers modified via InjectHandlers may still be used. +// However, the handlers will act as a no-op meaning no metrics will be published. +// +// Example: +// r, err := csm.Start("clientID", ":31000") +// if err != nil { +// panic(fmt.Errorf("failed starting CSM: %v", err)) +// } +// +// sess, err := session.NewSession(&aws.Config{}) +// if err != nil { +// panic(fmt.Errorf("failed loading session: %v", err)) +// } +// +// r.InjectHandlers(&sess.Handlers) +// +// client := s3.New(sess) +// resp, err := client.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +// +// // Will pause monitoring +// r.Pause() +// resp, err = client.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +// +// // Resume monitoring +// r.Continue() +// +// Start returns a Reporter that is used to enable or disable monitoring. If +// access to the Reporter is required later, calling Get will return the Reporter +// singleton. +// +// Example: +// r := csm.Get() +// r.Continue() +package csm diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go new file mode 100644 index 000000000000..2f0c6eac9a80 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/enable.go @@ -0,0 +1,67 @@ +package csm + +import ( + "fmt" + "sync" +) + +var ( + lock sync.Mutex +) + +// Client side metric handler names +const ( + APICallMetricHandlerName = "awscsm.SendAPICallMetric" + APICallAttemptMetricHandlerName = "awscsm.SendAPICallAttemptMetric" +) + +// Start will start the a long running go routine to capture +// client side metrics. Calling start multiple time will only +// start the metric listener once and will panic if a different +// client ID or port is passed in. 
+// +// Example: +// r, err := csm.Start("clientID", "127.0.0.1:8094") +// if err != nil { +// panic(fmt.Errorf("expected no error, but received %v", err)) +// } +// sess := session.NewSession() +// r.InjectHandlers(sess.Handlers) +// +// svc := s3.New(sess) +// out, err := svc.GetObject(&s3.GetObjectInput{ +// Bucket: aws.String("bucket"), +// Key: aws.String("key"), +// }) +func Start(clientID string, url string) (*Reporter, error) { + lock.Lock() + defer lock.Unlock() + + if sender == nil { + sender = newReporter(clientID, url) + } else { + if sender.clientID != clientID { + panic(fmt.Errorf("inconsistent client IDs. %q was expected, but received %q", sender.clientID, clientID)) + } + + if sender.url != url { + panic(fmt.Errorf("inconsistent URLs. %q was expected, but received %q", sender.url, url)) + } + } + + if err := connect(url); err != nil { + sender = nil + return nil, err + } + + return sender, nil +} + +// Get will return a reporter if one exists, if one does not exist, nil will +// be returned. 
+func Get() *Reporter { + lock.Lock() + defer lock.Unlock() + + return sender +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go new file mode 100644 index 000000000000..4b0d630e4c1e --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/metric.go @@ -0,0 +1,51 @@ +package csm + +import ( + "strconv" + "time" +) + +type metricTime time.Time + +func (t metricTime) MarshalJSON() ([]byte, error) { + ns := time.Duration(time.Time(t).UnixNano()) + return []byte(strconv.FormatInt(int64(ns/time.Millisecond), 10)), nil +} + +type metric struct { + ClientID *string `json:"ClientId,omitempty"` + API *string `json:"Api,omitempty"` + Service *string `json:"Service,omitempty"` + Timestamp *metricTime `json:"Timestamp,omitempty"` + Type *string `json:"Type,omitempty"` + Version *int `json:"Version,omitempty"` + + AttemptCount *int `json:"AttemptCount,omitempty"` + Latency *int `json:"Latency,omitempty"` + + Fqdn *string `json:"Fqdn,omitempty"` + UserAgent *string `json:"UserAgent,omitempty"` + AttemptLatency *int `json:"AttemptLatency,omitempty"` + + SessionToken *string `json:"SessionToken,omitempty"` + Region *string `json:"Region,omitempty"` + AccessKey *string `json:"AccessKey,omitempty"` + HTTPStatusCode *int `json:"HttpStatusCode,omitempty"` + XAmzID2 *string `json:"XAmzId2,omitempty"` + XAmzRequestID *string `json:"XAmznRequestId,omitempty"` + + AWSException *string `json:"AwsException,omitempty"` + AWSExceptionMessage *string `json:"AwsExceptionMessage,omitempty"` + SDKException *string `json:"SdkException,omitempty"` + SDKExceptionMessage *string `json:"SdkExceptionMessage,omitempty"` + + DestinationIP *string `json:"DestinationIp,omitempty"` + ConnectionReused *int `json:"ConnectionReused,omitempty"` + + AcquireConnectionLatency *int `json:"AcquireConnectionLatency,omitempty"` + ConnectLatency *int `json:"ConnectLatency,omitempty"` + RequestLatency *int `json:"RequestLatency,omitempty"` + 
DNSLatency *int `json:"DnsLatency,omitempty"` + TCPLatency *int `json:"TcpLatency,omitempty"` + SSLLatency *int `json:"SslLatency,omitempty"` +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go new file mode 100644 index 000000000000..514fc3739a5f --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/metricChan.go @@ -0,0 +1,54 @@ +package csm + +import ( + "sync/atomic" +) + +const ( + runningEnum = iota + pausedEnum +) + +var ( + // MetricsChannelSize of metrics to hold in the channel + MetricsChannelSize = 100 +) + +type metricChan struct { + ch chan metric + paused int64 +} + +func newMetricChan(size int) metricChan { + return metricChan{ + ch: make(chan metric, size), + } +} + +func (ch *metricChan) Pause() { + atomic.StoreInt64(&ch.paused, pausedEnum) +} + +func (ch *metricChan) Continue() { + atomic.StoreInt64(&ch.paused, runningEnum) +} + +func (ch *metricChan) IsPaused() bool { + v := atomic.LoadInt64(&ch.paused) + return v == pausedEnum +} + +// Push will push metrics to the metric channel if the channel +// is not paused +func (ch *metricChan) Push(m metric) bool { + if ch.IsPaused() { + return false + } + + select { + case ch.ch <- m: + return true + default: + return false + } +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go b/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go new file mode 100644 index 000000000000..1484c8fc5b19 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/aws/csm/reporter.go @@ -0,0 +1,230 @@ +package csm + +import ( + "encoding/json" + "net" + "time" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" + "github.com/aws/aws-sdk-go/aws/request" +) + +const ( + // DefaultPort is used when no port is specified + DefaultPort = "31000" +) + +// Reporter will gather metrics of API requests made and +// send those metrics to the CSM endpoint. 
+type Reporter struct { + clientID string + url string + conn net.Conn + metricsCh metricChan + done chan struct{} +} + +var ( + sender *Reporter +) + +func connect(url string) error { + const network = "udp" + if err := sender.connect(network, url); err != nil { + return err + } + + if sender.done == nil { + sender.done = make(chan struct{}) + go sender.start() + } + + return nil +} + +func newReporter(clientID, url string) *Reporter { + return &Reporter{ + clientID: clientID, + url: url, + metricsCh: newMetricChan(MetricsChannelSize), + } +} + +func (rep *Reporter) sendAPICallAttemptMetric(r *request.Request) { + if rep == nil { + return + } + + now := time.Now() + creds, _ := r.Config.Credentials.Get() + + m := metric{ + ClientID: aws.String(rep.clientID), + API: aws.String(r.Operation.Name), + Service: aws.String(r.ClientInfo.ServiceID), + Timestamp: (*metricTime)(&now), + UserAgent: aws.String(r.HTTPRequest.Header.Get("User-Agent")), + Region: r.Config.Region, + Type: aws.String("ApiCallAttempt"), + Version: aws.Int(1), + + XAmzRequestID: aws.String(r.RequestID), + + AttemptCount: aws.Int(r.RetryCount + 1), + AttemptLatency: aws.Int(int(now.Sub(r.AttemptTime).Nanoseconds() / int64(time.Millisecond))), + AccessKey: aws.String(creds.AccessKeyID), + } + + if r.HTTPResponse != nil { + m.HTTPStatusCode = aws.Int(r.HTTPResponse.StatusCode) + } + + if r.Error != nil { + if awserr, ok := r.Error.(awserr.Error); ok { + setError(&m, awserr) + } + } + + rep.metricsCh.Push(m) +} + +func setError(m *metric, err awserr.Error) { + msg := err.Message() + code := err.Code() + + switch code { + case "RequestError", + "SerializationError", + request.CanceledErrorCode: + + m.SDKException = &code + m.SDKExceptionMessage = &msg + default: + m.AWSException = &code + m.AWSExceptionMessage = &msg + } +} + +func (rep *Reporter) sendAPICallMetric(r *request.Request) { + if rep == nil { + return + } + + now := time.Now() + m := metric{ + ClientID: aws.String(rep.clientID), + API: 
aws.String(r.Operation.Name), + Service: aws.String(r.ClientInfo.ServiceID), + Timestamp: (*metricTime)(&now), + Type: aws.String("ApiCall"), + AttemptCount: aws.Int(r.RetryCount + 1), + Latency: aws.Int(int(time.Now().Sub(r.Time) / time.Millisecond)), + XAmzRequestID: aws.String(r.RequestID), + } + + // TODO: Probably want to figure something out for logging dropped + // metrics + rep.metricsCh.Push(m) +} + +func (rep *Reporter) connect(network, url string) error { + if rep.conn != nil { + rep.conn.Close() + } + + conn, err := net.Dial(network, url) + if err != nil { + return awserr.New("UDPError", "Could not connect", err) + } + + rep.conn = conn + + return nil +} + +func (rep *Reporter) close() { + if rep.done != nil { + close(rep.done) + } + + rep.metricsCh.Pause() +} + +func (rep *Reporter) start() { + defer func() { + rep.metricsCh.Pause() + }() + + for { + select { + case <-rep.done: + rep.done = nil + return + case m := <-rep.metricsCh.ch: + // TODO: What to do with this error? Probably should just log + b, err := json.Marshal(m) + if err != nil { + continue + } + + rep.conn.Write(b) + } + } +} + +// Pause will pause the metric channel preventing any new metrics from +// being added. +func (rep *Reporter) Pause() { + lock.Lock() + defer lock.Unlock() + + if rep == nil { + return + } + + rep.close() +} + +// Continue will reopen the metric channel and allow for monitoring +// to be resumed. +func (rep *Reporter) Continue() { + lock.Lock() + defer lock.Unlock() + if rep == nil { + return + } + + if !rep.metricsCh.IsPaused() { + return + } + + rep.metricsCh.Continue() +} + +// InjectHandlers will will enable client side metrics and inject the proper +// handlers to handle how metrics are sent. 
+// +// Example: +// // Start must be called in order to inject the correct handlers +// r, err := csm.Start("clientID", "127.0.0.1:8094") +// if err != nil { +// panic(fmt.Errorf("expected no error, but received %v", err)) +// } +// +// sess := session.NewSession() +// r.InjectHandlers(&sess.Handlers) +// +// // create a new service client with our client side metric session +// svc := s3.New(sess) +func (rep *Reporter) InjectHandlers(handlers *request.Handlers) { + if rep == nil { + return + } + + apiCallHandler := request.NamedHandler{Name: APICallMetricHandlerName, Fn: rep.sendAPICallMetric} + handlers.Complete.PushFrontNamed(apiCallHandler) + + apiCallAttemptHandler := request.NamedHandler{Name: APICallAttemptMetricHandlerName, Fn: rep.sendAPICallAttemptMetric} + handlers.AfterRetry.PushFrontNamed(apiCallAttemptHandler) +} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go b/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go index 857f677dd106..c472a57fad2f 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go @@ -48,6 +48,7 @@ const ( A4bServiceID = "a4b" // A4b. AcmServiceID = "acm" // Acm. AcmPcaServiceID = "acm-pca" // AcmPca. + ApiMediatailorServiceID = "api.mediatailor" // ApiMediatailor. ApiPricingServiceID = "api.pricing" // ApiPricing. ApigatewayServiceID = "apigateway" // Apigateway. ApplicationAutoscalingServiceID = "application-autoscaling" // ApplicationAutoscaling. @@ -130,6 +131,7 @@ const ( ModelsLexServiceID = "models.lex" // ModelsLex. MonitoringServiceID = "monitoring" // Monitoring. MturkRequesterServiceID = "mturk-requester" // MturkRequester. + NeptuneServiceID = "neptune" // Neptune. OpsworksServiceID = "opsworks" // Opsworks. OpsworksCmServiceID = "opsworks-cm" // OpsworksCm. OrganizationsServiceID = "organizations" // Organizations. 
@@ -307,6 +309,16 @@ var awsPartition = partition{ "us-west-2": endpoint{}, }, }, + "api.mediatailor": service{ + + Endpoints: endpoints{ + "ap-northeast-1": endpoint{}, + "ap-southeast-1": endpoint{}, + "ap-southeast-2": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + }, + }, "api.pricing": service{ Defaults: endpoint{ CredentialScope: credentialScope{ @@ -434,6 +446,7 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, "ap-northeast-2": endpoint{}, + "ap-south-1": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, "ca-central-1": endpoint{}, @@ -1046,6 +1059,7 @@ var awsPartition = partition{ "elasticfilesystem": service{ Endpoints: endpoints{ + "ap-northeast-2": endpoint{}, "ap-southeast-2": endpoint{}, "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, @@ -1242,11 +1256,13 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, + "ap-northeast-2": endpoint{}, "ap-south-1": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, + "eu-west-2": endpoint{}, "us-east-1": endpoint{}, "us-east-2": endpoint{}, "us-west-2": endpoint{}, @@ -1509,8 +1525,10 @@ var awsPartition = partition{ Endpoints: endpoints{ "ap-northeast-1": endpoint{}, + "ap-northeast-2": endpoint{}, "ap-southeast-1": endpoint{}, "ap-southeast-2": endpoint{}, + "eu-central-1": endpoint{}, "eu-west-1": endpoint{}, "us-east-1": endpoint{}, "us-west-2": endpoint{}, @@ -1622,6 +1640,35 @@ var awsPartition = partition{ "us-east-1": endpoint{}, }, }, + "neptune": service{ + + Endpoints: endpoints{ + "eu-west-1": endpoint{ + Hostname: "rds.eu-west-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "eu-west-1", + }, + }, + "us-east-1": endpoint{ + Hostname: "rds.us-east-1.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-east-1", + }, + }, + "us-east-2": endpoint{ + Hostname: "rds.us-east-2.amazonaws.com", + 
CredentialScope: credentialScope{ + Region: "us-east-2", + }, + }, + "us-west-2": endpoint{ + Hostname: "rds.us-west-2.amazonaws.com", + CredentialScope: credentialScope{ + Region: "us-west-2", + }, + }, + }, + }, "opsworks": service{ Endpoints: endpoints{ @@ -1805,10 +1852,11 @@ var awsPartition = partition{ "runtime.sagemaker": service{ Endpoints: endpoints{ - "eu-west-1": endpoint{}, - "us-east-1": endpoint{}, - "us-east-2": endpoint{}, - "us-west-2": endpoint{}, + "ap-northeast-1": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + "us-east-2": endpoint{}, + "us-west-2": endpoint{}, }, }, "s3": service{ @@ -1873,10 +1921,11 @@ var awsPartition = partition{ "sagemaker": service{ Endpoints: endpoints{ - "eu-west-1": endpoint{}, - "us-east-1": endpoint{}, - "us-east-2": endpoint{}, - "us-west-2": endpoint{}, + "ap-northeast-1": endpoint{}, + "eu-west-1": endpoint{}, + "us-east-1": endpoint{}, + "us-east-2": endpoint{}, + "us-west-2": endpoint{}, }, }, "sdb": service{ @@ -2081,6 +2130,10 @@ var awsPartition = partition{ "eu-west-1": endpoint{}, "eu-west-2": endpoint{}, "eu-west-3": endpoint{}, + "fips-us-east-1": endpoint{}, + "fips-us-east-2": endpoint{}, + "fips-us-west-1": endpoint{}, + "fips-us-west-2": endpoint{}, "sa-east-1": endpoint{}, "us-east-1": endpoint{ SSLCommonName: "queue.{dnsSuffix}", @@ -2507,13 +2560,15 @@ var awscnPartition = partition{ "ecr": service{ Endpoints: endpoints{ - "cn-north-1": endpoint{}, + "cn-north-1": endpoint{}, + "cn-northwest-1": endpoint{}, }, }, "ecs": service{ Endpoints: endpoints{ - "cn-north-1": endpoint{}, + "cn-north-1": endpoint{}, + "cn-northwest-1": endpoint{}, }, }, "elasticache": service{ diff --git a/vendor/github.com/aws/aws-sdk-go/aws/logger.go b/vendor/github.com/aws/aws-sdk-go/aws/logger.go index 3babb5abdb69..6ed15b2ecc26 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/logger.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/logger.go @@ -71,6 +71,12 @@ const ( // LogDebugWithRequestErrors 
states the SDK should log when service requests fail // to build, send, validate, or unmarshal. LogDebugWithRequestErrors + + // LogDebugWithEventStreamBody states the SDK should log EventStream + // request and response bodys. This should be used to log the EventStream + // wire unmarshaled message content of requests and responses made while + // using the SDK Will also enable LogDebug. + LogDebugWithEventStreamBody ) // A Logger is a minimalistic interface for the SDK to log messages to. Should diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go b/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go index 802ac88ad5cd..605a72d3c940 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/handlers.go @@ -14,6 +14,7 @@ type Handlers struct { Send HandlerList ValidateResponse HandlerList Unmarshal HandlerList + UnmarshalStream HandlerList UnmarshalMeta HandlerList UnmarshalError HandlerList Retry HandlerList @@ -30,6 +31,7 @@ func (h *Handlers) Copy() Handlers { Send: h.Send.copy(), ValidateResponse: h.ValidateResponse.copy(), Unmarshal: h.Unmarshal.copy(), + UnmarshalStream: h.UnmarshalStream.copy(), UnmarshalError: h.UnmarshalError.copy(), UnmarshalMeta: h.UnmarshalMeta.copy(), Retry: h.Retry.copy(), @@ -45,6 +47,7 @@ func (h *Handlers) Clear() { h.Send.Clear() h.Sign.Clear() h.Unmarshal.Clear() + h.UnmarshalStream.Clear() h.UnmarshalMeta.Clear() h.UnmarshalError.Clear() h.ValidateResponse.Clear() @@ -172,6 +175,21 @@ func (l *HandlerList) SwapNamed(n NamedHandler) (swapped bool) { return swapped } +// Swap will swap out all handlers matching the name passed in. The matched +// handlers will be swapped in. True is returned if the handlers were swapped. 
+func (l *HandlerList) Swap(name string, replace NamedHandler) bool { + var swapped bool + + for i := 0; i < len(l.list); i++ { + if l.list[i].Name == name { + l.list[i] = replace + swapped = true + } + } + + return swapped +} + // SetBackNamed will replace the named handler if it exists in the handler list. // If the handler does not exist the handler will be added to the end of the list. func (l *HandlerList) SetBackNamed(n NamedHandler) { diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request.go index 69b7a01ad74a..75f0fe07780a 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request.go @@ -46,6 +46,7 @@ type Request struct { Handlers Handlers Retryer + AttemptTime time.Time Time time.Time Operation *Operation HTTPRequest *http.Request @@ -121,6 +122,7 @@ func New(cfg aws.Config, clientInfo metadata.ClientInfo, handlers Handlers, Handlers: handlers.Copy(), Retryer: retryer, + AttemptTime: time.Now(), Time: time.Now(), ExpireTime: 0, Operation: operation, @@ -368,9 +370,9 @@ func (r *Request) Build() error { return r.Error } -// Sign will sign the request returning error if errors are encountered. +// Sign will sign the request, returning error if errors are encountered. // -// Send will build the request prior to signing. All Sign Handlers will +// Sign will build the request prior to signing. All Sign Handlers will // be executed in the order they were set. func (r *Request) Sign() error { r.Build() @@ -440,7 +442,7 @@ func (r *Request) GetBody() io.ReadSeeker { return r.safeBody } -// Send will send the request returning error if errors are encountered. +// Send will send the request, returning error if errors are encountered. // // Send will sign the request prior to sending. All Send Handlers will // be executed in the order they were set. 
@@ -461,6 +463,7 @@ func (r *Request) Send() error { }() for { + r.AttemptTime = time.Now() if aws.BoolValue(r.Retryable) { if r.Config.LogLevel.Matches(aws.LogDebugWithRequestRetries) { r.Config.Logger.Log(fmt.Sprintf("DEBUG: Retrying Request %s/%s, attempt %d", diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go index 869b97a1a0fa..e36e468b7c61 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_7.go @@ -21,7 +21,7 @@ func (noBody) WriteTo(io.Writer) (int64, error) { return 0, nil } var NoBody = noBody{} // ResetBody rewinds the request body back to its starting position, and -// set's the HTTP Request body reference. When the body is read prior +// sets the HTTP Request body reference. When the body is read prior // to being sent in the HTTP request it will need to be rewound. // // ResetBody will automatically be called by the SDK's build handler, but if diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go index c32fc69bc56f..7c6a8000f675 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_1_8.go @@ -11,7 +11,7 @@ import ( var NoBody = http.NoBody // ResetBody rewinds the request body back to its starting position, and -// set's the HTTP Request body reference. When the body is read prior +// sets the HTTP Request body reference. When the body is read prior // to being sent in the HTTP request it will need to be rewound. 
// // ResetBody will automatically be called by the SDK's build handler, but if diff --git a/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go b/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go index 159518a75cda..a633ed5acfa3 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/request/request_pagination.go @@ -35,8 +35,12 @@ type Pagination struct { // NewRequest should always be built from the same API operations. It is // undefined if different API operations are returned on subsequent calls. NewRequest func() (*Request, error) + // EndPageOnSameToken, when enabled, will allow the paginator to stop on + // token that are the same as its previous tokens. + EndPageOnSameToken bool started bool + prevTokens []interface{} nextTokens []interface{} err error @@ -49,7 +53,15 @@ type Pagination struct { // // Will always return true if Next has not been called yet. func (p *Pagination) HasNextPage() bool { - return !(p.started && len(p.nextTokens) == 0) + if !p.started { + return true + } + + hasNextPage := len(p.nextTokens) != 0 + if p.EndPageOnSameToken { + return hasNextPage && !awsutil.DeepEqual(p.nextTokens, p.prevTokens) + } + return hasNextPage } // Err returns the error Pagination encountered when retrieving the next page. 
@@ -96,6 +108,7 @@ func (p *Pagination) Next() bool { return false } + p.prevTokens = p.nextTokens p.nextTokens = req.nextPageTokens() p.curPage = req.Data diff --git a/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go b/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go index 12b452177a8b..82e04d76cdeb 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/session/env_config.go @@ -96,9 +96,23 @@ type envConfig struct { // // AWS_CA_BUNDLE=$HOME/my_custom_ca_bundle CustomCABundle string + + csmEnabled string + CSMEnabled bool + CSMPort string + CSMClientID string } var ( + csmEnabledEnvKey = []string{ + "AWS_CSM_ENABLED", + } + csmPortEnvKey = []string{ + "AWS_CSM_PORT", + } + csmClientIDEnvKey = []string{ + "AWS_CSM_CLIENT_ID", + } credAccessEnvKey = []string{ "AWS_ACCESS_KEY_ID", "AWS_ACCESS_KEY", @@ -157,6 +171,12 @@ func envConfigLoad(enableSharedConfig bool) envConfig { setFromEnvVal(&cfg.Creds.SecretAccessKey, credSecretEnvKey) setFromEnvVal(&cfg.Creds.SessionToken, credSessionEnvKey) + // CSM environment variables + setFromEnvVal(&cfg.csmEnabled, csmEnabledEnvKey) + setFromEnvVal(&cfg.CSMPort, csmPortEnvKey) + setFromEnvVal(&cfg.CSMClientID, csmClientIDEnvKey) + cfg.CSMEnabled = len(cfg.csmEnabled) > 0 + // Require logical grouping of credentials if len(cfg.Creds.AccessKeyID) == 0 || len(cfg.Creds.SecretAccessKey) == 0 { cfg.Creds = credentials.Value{} diff --git a/vendor/github.com/aws/aws-sdk-go/aws/session/session.go b/vendor/github.com/aws/aws-sdk-go/aws/session/session.go index 259b5c0fecc0..51f30556301f 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/session/session.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/session/session.go @@ -15,6 +15,7 @@ import ( "github.com/aws/aws-sdk-go/aws/corehandlers" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/credentials/stscreds" + "github.com/aws/aws-sdk-go/aws/csm" "github.com/aws/aws-sdk-go/aws/defaults" 
"github.com/aws/aws-sdk-go/aws/endpoints" "github.com/aws/aws-sdk-go/aws/request" @@ -81,10 +82,16 @@ func New(cfgs ...*aws.Config) *Session { r.Error = err }) } + return s } - return deprecatedNewSession(cfgs...) + s := deprecatedNewSession(cfgs...) + if envCfg.CSMEnabled { + enableCSM(&s.Handlers, envCfg.CSMClientID, envCfg.CSMPort, s.Config.Logger) + } + + return s } // NewSession returns a new Session created from SDK defaults, config files, @@ -300,10 +307,22 @@ func deprecatedNewSession(cfgs ...*aws.Config) *Session { } initHandlers(s) - return s } +func enableCSM(handlers *request.Handlers, clientID string, port string, logger aws.Logger) { + logger.Log("Enabling CSM") + if len(port) == 0 { + port = csm.DefaultPort + } + + r, err := csm.Start(clientID, "127.0.0.1:"+port) + if err != nil { + return + } + r.InjectHandlers(handlers) +} + func newSession(opts Options, envCfg envConfig, cfgs ...*aws.Config) (*Session, error) { cfg := defaults.Config() handlers := defaults.Handlers() @@ -343,6 +362,9 @@ func newSession(opts Options, envCfg envConfig, cfgs ...*aws.Config) (*Session, } initHandlers(s) + if envCfg.CSMEnabled { + enableCSM(&s.Handlers, envCfg.CSMClientID, envCfg.CSMPort, s.Config.Logger) + } // Setup HTTP client with custom cert bundle if enabled if opts.CustomCABundle != nil { diff --git a/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go b/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go index 6e46376125bc..f3586131538c 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/signer/v4/v4.go @@ -135,6 +135,7 @@ var requiredSignedHeaders = rules{ "X-Amz-Server-Side-Encryption-Customer-Key-Md5": struct{}{}, "X-Amz-Storage-Class": struct{}{}, "X-Amz-Website-Redirect-Location": struct{}{}, + "X-Amz-Content-Sha256": struct{}{}, }, }, patterns{"X-Amz-Meta-"}, @@ -671,8 +672,15 @@ func (ctx *signingCtx) buildSignature() { func (ctx *signingCtx) buildBodyDigest() error { hash := 
ctx.Request.Header.Get("X-Amz-Content-Sha256") if hash == "" { - if ctx.unsignedPayload || (ctx.isPresign && ctx.ServiceName == "s3") { + includeSHA256Header := ctx.unsignedPayload || + ctx.ServiceName == "s3" || + ctx.ServiceName == "glacier" + + s3Presign := ctx.isPresign && ctx.ServiceName == "s3" + + if ctx.unsignedPayload || s3Presign { hash = "UNSIGNED-PAYLOAD" + includeSHA256Header = !s3Presign } else if ctx.Body == nil { hash = emptyStringSHA256 } else { @@ -681,7 +689,8 @@ func (ctx *signingCtx) buildBodyDigest() error { } hash = hex.EncodeToString(makeSha256Reader(ctx.Body)) } - if ctx.unsignedPayload || ctx.ServiceName == "s3" || ctx.ServiceName == "glacier" { + + if includeSHA256Header { ctx.Request.Header.Set("X-Amz-Content-Sha256", hash) } } diff --git a/vendor/github.com/aws/aws-sdk-go/aws/version.go b/vendor/github.com/aws/aws-sdk-go/aws/version.go index befbff7df07d..c108466609e9 100644 --- a/vendor/github.com/aws/aws-sdk-go/aws/version.go +++ b/vendor/github.com/aws/aws-sdk-go/aws/version.go @@ -5,4 +5,4 @@ package aws const SDKName = "aws-sdk-go" // SDKVersion is the version of this SDK -const SDKVersion = "1.13.49" +const SDKVersion = "1.14.12" diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go new file mode 100644 index 000000000000..ecc7bf82fa20 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/debug.go @@ -0,0 +1,144 @@ +package eventstream + +import ( + "bytes" + "encoding/base64" + "encoding/json" + "fmt" + "strconv" +) + +type decodedMessage struct { + rawMessage + Headers decodedHeaders `json:"headers"` +} +type jsonMessage struct { + Length json.Number `json:"total_length"` + HeadersLen json.Number `json:"headers_length"` + PreludeCRC json.Number `json:"prelude_crc"` + Headers decodedHeaders `json:"headers"` + Payload []byte `json:"payload"` + CRC json.Number `json:"message_crc"` +} + +func (d 
*decodedMessage) UnmarshalJSON(b []byte) (err error) { + var jsonMsg jsonMessage + if err = json.Unmarshal(b, &jsonMsg); err != nil { + return err + } + + d.Length, err = numAsUint32(jsonMsg.Length) + if err != nil { + return err + } + d.HeadersLen, err = numAsUint32(jsonMsg.HeadersLen) + if err != nil { + return err + } + d.PreludeCRC, err = numAsUint32(jsonMsg.PreludeCRC) + if err != nil { + return err + } + d.Headers = jsonMsg.Headers + d.Payload = jsonMsg.Payload + d.CRC, err = numAsUint32(jsonMsg.CRC) + if err != nil { + return err + } + + return nil +} + +func (d *decodedMessage) MarshalJSON() ([]byte, error) { + jsonMsg := jsonMessage{ + Length: json.Number(strconv.Itoa(int(d.Length))), + HeadersLen: json.Number(strconv.Itoa(int(d.HeadersLen))), + PreludeCRC: json.Number(strconv.Itoa(int(d.PreludeCRC))), + Headers: d.Headers, + Payload: d.Payload, + CRC: json.Number(strconv.Itoa(int(d.CRC))), + } + + return json.Marshal(jsonMsg) +} + +func numAsUint32(n json.Number) (uint32, error) { + v, err := n.Int64() + if err != nil { + return 0, fmt.Errorf("failed to get int64 json number, %v", err) + } + + return uint32(v), nil +} + +func (d decodedMessage) Message() Message { + return Message{ + Headers: Headers(d.Headers), + Payload: d.Payload, + } +} + +type decodedHeaders Headers + +func (hs *decodedHeaders) UnmarshalJSON(b []byte) error { + var jsonHeaders []struct { + Name string `json:"name"` + Type valueType `json:"type"` + Value interface{} `json:"value"` + } + + decoder := json.NewDecoder(bytes.NewReader(b)) + decoder.UseNumber() + if err := decoder.Decode(&jsonHeaders); err != nil { + return err + } + + var headers Headers + for _, h := range jsonHeaders { + value, err := valueFromType(h.Type, h.Value) + if err != nil { + return err + } + headers.Set(h.Name, value) + } + (*hs) = decodedHeaders(headers) + + return nil +} + +func valueFromType(typ valueType, val interface{}) (Value, error) { + switch typ { + case trueValueType: + return BoolValue(true), nil + 
case falseValueType: + return BoolValue(false), nil + case int8ValueType: + v, err := val.(json.Number).Int64() + return Int8Value(int8(v)), err + case int16ValueType: + v, err := val.(json.Number).Int64() + return Int16Value(int16(v)), err + case int32ValueType: + v, err := val.(json.Number).Int64() + return Int32Value(int32(v)), err + case int64ValueType: + v, err := val.(json.Number).Int64() + return Int64Value(v), err + case bytesValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + return BytesValue(v), err + case stringValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + return StringValue(string(v)), err + case timestampValueType: + v, err := val.(json.Number).Int64() + return TimestampValue(timeFromEpochMilli(v)), err + case uuidValueType: + v, err := base64.StdEncoding.DecodeString(val.(string)) + var tv UUIDValue + copy(tv[:], v) + return tv, err + default: + panic(fmt.Sprintf("unknown type, %s, %T", typ.String(), val)) + } +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go new file mode 100644 index 000000000000..4b972b2d6664 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/decode.go @@ -0,0 +1,199 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "encoding/hex" + "encoding/json" + "fmt" + "hash" + "hash/crc32" + "io" + + "github.com/aws/aws-sdk-go/aws" +) + +// Decoder provides decoding of an Event Stream messages. +type Decoder struct { + r io.Reader + logger aws.Logger +} + +// NewDecoder initializes and returns a Decoder for decoding event +// stream messages from the reader provided. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{ + r: r, + } +} + +// Decode attempts to decode a single message from the event stream reader. +// Will return the event stream message, or error if Decode fails to read +// the message from the stream. 
+func (d *Decoder) Decode(payloadBuf []byte) (m Message, err error) { + reader := d.r + if d.logger != nil { + debugMsgBuf := bytes.NewBuffer(nil) + reader = io.TeeReader(reader, debugMsgBuf) + defer func() { + logMessageDecode(d.logger, debugMsgBuf, m, err) + }() + } + + crc := crc32.New(crc32IEEETable) + hashReader := io.TeeReader(reader, crc) + + prelude, err := decodePrelude(hashReader, crc) + if err != nil { + return Message{}, err + } + + if prelude.HeadersLen > 0 { + lr := io.LimitReader(hashReader, int64(prelude.HeadersLen)) + m.Headers, err = decodeHeaders(lr) + if err != nil { + return Message{}, err + } + } + + if payloadLen := prelude.PayloadLen(); payloadLen > 0 { + buf, err := decodePayload(payloadBuf, io.LimitReader(hashReader, int64(payloadLen))) + if err != nil { + return Message{}, err + } + m.Payload = buf + } + + msgCRC := crc.Sum32() + if err := validateCRC(reader, msgCRC); err != nil { + return Message{}, err + } + + return m, nil +} + +// UseLogger specifies the Logger that that the decoder should use to log the +// message decode to. 
+func (d *Decoder) UseLogger(logger aws.Logger) { + d.logger = logger +} + +func logMessageDecode(logger aws.Logger, msgBuf *bytes.Buffer, msg Message, decodeErr error) { + w := bytes.NewBuffer(nil) + defer func() { logger.Log(w.String()) }() + + fmt.Fprintf(w, "Raw message:\n%s\n", + hex.Dump(msgBuf.Bytes())) + + if decodeErr != nil { + fmt.Fprintf(w, "Decode error: %v\n", decodeErr) + return + } + + rawMsg, err := msg.rawMessage() + if err != nil { + fmt.Fprintf(w, "failed to create raw message, %v\n", err) + return + } + + decodedMsg := decodedMessage{ + rawMessage: rawMsg, + Headers: decodedHeaders(msg.Headers), + } + + fmt.Fprintf(w, "Decoded message:\n") + encoder := json.NewEncoder(w) + if err := encoder.Encode(decodedMsg); err != nil { + fmt.Fprintf(w, "failed to generate decoded message, %v\n", err) + } +} + +func decodePrelude(r io.Reader, crc hash.Hash32) (messagePrelude, error) { + var p messagePrelude + + var err error + p.Length, err = decodeUint32(r) + if err != nil { + return messagePrelude{}, err + } + + p.HeadersLen, err = decodeUint32(r) + if err != nil { + return messagePrelude{}, err + } + + if err := p.ValidateLens(); err != nil { + return messagePrelude{}, err + } + + preludeCRC := crc.Sum32() + if err := validateCRC(r, preludeCRC); err != nil { + return messagePrelude{}, err + } + + p.PreludeCRC = preludeCRC + + return p, nil +} + +func decodePayload(buf []byte, r io.Reader) ([]byte, error) { + w := bytes.NewBuffer(buf[0:0]) + + _, err := io.Copy(w, r) + return w.Bytes(), err +} + +func decodeUint8(r io.Reader) (uint8, error) { + type byteReader interface { + ReadByte() (byte, error) + } + + if br, ok := r.(byteReader); ok { + v, err := br.ReadByte() + return uint8(v), err + } + + var b [1]byte + _, err := io.ReadFull(r, b[:]) + return uint8(b[0]), err +} +func decodeUint16(r io.Reader) (uint16, error) { + var b [2]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint16(bs), nil 
+} +func decodeUint32(r io.Reader) (uint32, error) { + var b [4]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint32(bs), nil +} +func decodeUint64(r io.Reader) (uint64, error) { + var b [8]byte + bs := b[:] + _, err := io.ReadFull(r, bs) + if err != nil { + return 0, err + } + return binary.BigEndian.Uint64(bs), nil +} + +func validateCRC(r io.Reader, expect uint32) error { + msgCRC, err := decodeUint32(r) + if err != nil { + return err + } + + if msgCRC != expect { + return ChecksumError{} + } + + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go new file mode 100644 index 000000000000..150a60981d83 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/encode.go @@ -0,0 +1,114 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "hash" + "hash/crc32" + "io" +) + +// Encoder provides EventStream message encoding. +type Encoder struct { + w io.Writer + + headersBuf *bytes.Buffer +} + +// NewEncoder initializes and returns an Encoder to encode Event Stream +// messages to an io.Writer. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: w, + headersBuf: bytes.NewBuffer(nil), + } +} + +// Encode encodes a single EventStream message to the io.Writer the Encoder +// was created with. An error is returned if writing the message fails. 
+func (e *Encoder) Encode(msg Message) error { + e.headersBuf.Reset() + + err := encodeHeaders(e.headersBuf, msg.Headers) + if err != nil { + return err + } + + crc := crc32.New(crc32IEEETable) + hashWriter := io.MultiWriter(e.w, crc) + + headersLen := uint32(e.headersBuf.Len()) + payloadLen := uint32(len(msg.Payload)) + + if err := encodePrelude(hashWriter, crc, headersLen, payloadLen); err != nil { + return err + } + + if headersLen > 0 { + if _, err := io.Copy(hashWriter, e.headersBuf); err != nil { + return err + } + } + + if payloadLen > 0 { + if _, err := hashWriter.Write(msg.Payload); err != nil { + return err + } + } + + msgCRC := crc.Sum32() + return binary.Write(e.w, binary.BigEndian, msgCRC) +} + +func encodePrelude(w io.Writer, crc hash.Hash32, headersLen, payloadLen uint32) error { + p := messagePrelude{ + Length: minMsgLen + headersLen + payloadLen, + HeadersLen: headersLen, + } + if err := p.ValidateLens(); err != nil { + return err + } + + err := binaryWriteFields(w, binary.BigEndian, + p.Length, + p.HeadersLen, + ) + if err != nil { + return err + } + + p.PreludeCRC = crc.Sum32() + err = binary.Write(w, binary.BigEndian, p.PreludeCRC) + if err != nil { + return err + } + + return nil +} + +func encodeHeaders(w io.Writer, headers Headers) error { + for _, h := range headers { + hn := headerName{ + Len: uint8(len(h.Name)), + } + copy(hn.Name[:hn.Len], h.Name) + if err := hn.encode(w); err != nil { + return err + } + + if err := h.Value.encode(w); err != nil { + return err + } + } + + return nil +} + +func binaryWriteFields(w io.Writer, order binary.ByteOrder, vs ...interface{}) error { + for _, v := range vs { + if err := binary.Write(w, order, v); err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go new file mode 100644 index 000000000000..5481ef30796d --- /dev/null +++ 
b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/error.go @@ -0,0 +1,23 @@ +package eventstream + +import "fmt" + +// LengthError provides the error for items being larger than a maximum length. +type LengthError struct { + Part string + Want int + Have int + Value interface{} +} + +func (e LengthError) Error() string { + return fmt.Sprintf("%s length invalid, %d/%d, %v", + e.Part, e.Want, e.Have, e.Value) +} + +// ChecksumError provides the error for message checksum invalidation errors. +type ChecksumError struct{} + +func (e ChecksumError) Error() string { + return "message checksum mismatch" +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go new file mode 100644 index 000000000000..4a4e64c713ed --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/api.go @@ -0,0 +1,160 @@ +package eventstreamapi + +import ( + "fmt" + "io" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/private/protocol" + "github.com/aws/aws-sdk-go/private/protocol/eventstream" +) + +// Unmarshaler provides the interface for unmarshaling a EventStream +// message into a SDK type. +type Unmarshaler interface { + UnmarshalEvent(protocol.PayloadUnmarshaler, eventstream.Message) error +} + +// EventStream headers with specific meaning to async API functionality. +const ( + MessageTypeHeader = `:message-type` // Identifies type of message. + EventMessageType = `event` + ErrorMessageType = `error` + ExceptionMessageType = `exception` + + // Message Events + EventTypeHeader = `:event-type` // Identifies message event type e.g. "Stats". + + // Message Error + ErrorCodeHeader = `:error-code` + ErrorMessageHeader = `:error-message` + + // Message Exception + ExceptionTypeHeader = `:exception-type` +) + +// EventReader provides reading from the EventStream of an reader. 
+type EventReader struct { + reader io.ReadCloser + decoder *eventstream.Decoder + + unmarshalerForEventType func(string) (Unmarshaler, error) + payloadUnmarshaler protocol.PayloadUnmarshaler + + payloadBuf []byte +} + +// NewEventReader returns a EventReader built from the reader and unmarshaler +// provided. Use ReadStream method to start reading from the EventStream. +func NewEventReader( + reader io.ReadCloser, + payloadUnmarshaler protocol.PayloadUnmarshaler, + unmarshalerForEventType func(string) (Unmarshaler, error), +) *EventReader { + return &EventReader{ + reader: reader, + decoder: eventstream.NewDecoder(reader), + payloadUnmarshaler: payloadUnmarshaler, + unmarshalerForEventType: unmarshalerForEventType, + payloadBuf: make([]byte, 10*1024), + } +} + +// UseLogger instructs the EventReader to use the logger and log level +// specified. +func (r *EventReader) UseLogger(logger aws.Logger, logLevel aws.LogLevelType) { + if logger != nil && logLevel.Matches(aws.LogDebugWithEventStreamBody) { + r.decoder.UseLogger(logger) + } +} + +// ReadEvent attempts to read a message from the EventStream and return the +// unmarshaled event value that the message is for. +// +// For EventStream API errors check if the returned error satisfies the +// awserr.Error interface to get the error's Code and Message components. +// +// EventUnmarshalers called with EventStream messages must take copies of the +// message's Payload. The payload will is reused between events read. +func (r *EventReader) ReadEvent() (event interface{}, err error) { + msg, err := r.decoder.Decode(r.payloadBuf) + if err != nil { + return nil, err + } + defer func() { + // Reclaim payload buffer for next message read. 
+ r.payloadBuf = msg.Payload[0:0] + }() + + typ, err := GetHeaderString(msg, MessageTypeHeader) + if err != nil { + return nil, err + } + + switch typ { + case EventMessageType: + return r.unmarshalEventMessage(msg) + case ErrorMessageType: + return nil, r.unmarshalErrorMessage(msg) + default: + return nil, fmt.Errorf("unknown eventstream message type, %v", typ) + } +} + +func (r *EventReader) unmarshalEventMessage( + msg eventstream.Message, +) (event interface{}, err error) { + eventType, err := GetHeaderString(msg, EventTypeHeader) + if err != nil { + return nil, err + } + + ev, err := r.unmarshalerForEventType(eventType) + if err != nil { + return nil, err + } + + err = ev.UnmarshalEvent(r.payloadUnmarshaler, msg) + if err != nil { + return nil, err + } + + return ev, nil +} + +func (r *EventReader) unmarshalErrorMessage(msg eventstream.Message) (err error) { + var msgErr messageError + + msgErr.code, err = GetHeaderString(msg, ErrorCodeHeader) + if err != nil { + return err + } + + msgErr.msg, err = GetHeaderString(msg, ErrorMessageHeader) + if err != nil { + return err + } + + return msgErr +} + +// Close closes the EventReader's EventStream reader. +func (r *EventReader) Close() error { + return r.reader.Close() +} + +// GetHeaderString returns the value of the header as a string. If the header +// is not set or the value is not a string an error will be returned. 
+func GetHeaderString(msg eventstream.Message, headerName string) (string, error) { + headerVal := msg.Headers.Get(headerName) + if headerVal == nil { + return "", fmt.Errorf("error header %s not present", headerName) + } + + v, ok := headerVal.Get().(string) + if !ok { + return "", fmt.Errorf("error header value is not a string, %T", headerVal) + } + + return v, nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go new file mode 100644 index 000000000000..5ea5a988b63e --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi/error.go @@ -0,0 +1,24 @@ +package eventstreamapi + +import "fmt" + +type messageError struct { + code string + msg string +} + +func (e messageError) Code() string { + return e.code +} + +func (e messageError) Message() string { + return e.msg +} + +func (e messageError) Error() string { + return fmt.Sprintf("%s: %s", e.code, e.msg) +} + +func (e messageError) OrigErr() error { + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go new file mode 100644 index 000000000000..3b44dde2f323 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header.go @@ -0,0 +1,166 @@ +package eventstream + +import ( + "encoding/binary" + "fmt" + "io" +) + +// Headers are a collection of EventStream header values. +type Headers []Header + +// Header is a single EventStream Key Value header pair. +type Header struct { + Name string + Value Value +} + +// Set associates the name with a value. If the header name already exists in +// the Headers the value will be replaced with the new one. 
+func (hs *Headers) Set(name string, value Value) { + var i int + for ; i < len(*hs); i++ { + if (*hs)[i].Name == name { + (*hs)[i].Value = value + return + } + } + + *hs = append(*hs, Header{ + Name: name, Value: value, + }) +} + +// Get returns the Value associated with the header. Nil is returned if the +// value does not exist. +func (hs Headers) Get(name string) Value { + for i := 0; i < len(hs); i++ { + if h := hs[i]; h.Name == name { + return h.Value + } + } + return nil +} + +// Del deletes the value in the Headers if it exists. +func (hs *Headers) Del(name string) { + for i := 0; i < len(*hs); i++ { + if (*hs)[i].Name == name { + copy((*hs)[i:], (*hs)[i+1:]) + (*hs) = (*hs)[:len(*hs)-1] + } + } +} + +func decodeHeaders(r io.Reader) (Headers, error) { + hs := Headers{} + + for { + name, err := decodeHeaderName(r) + if err != nil { + if err == io.EOF { + // EOF while getting header name means no more headers + break + } + return nil, err + } + + value, err := decodeHeaderValue(r) + if err != nil { + return nil, err + } + + hs.Set(name, value) + } + + return hs, nil +} + +func decodeHeaderName(r io.Reader) (string, error) { + var n headerName + + var err error + n.Len, err = decodeUint8(r) + if err != nil { + return "", err + } + + name := n.Name[:n.Len] + if _, err := io.ReadFull(r, name); err != nil { + return "", err + } + + return string(name), nil +} + +func decodeHeaderValue(r io.Reader) (Value, error) { + var raw rawValue + + typ, err := decodeUint8(r) + if err != nil { + return nil, err + } + raw.Type = valueType(typ) + + var v Value + + switch raw.Type { + case trueValueType: + v = BoolValue(true) + case falseValueType: + v = BoolValue(false) + case int8ValueType: + var tv Int8Value + err = tv.decode(r) + v = tv + case int16ValueType: + var tv Int16Value + err = tv.decode(r) + v = tv + case int32ValueType: + var tv Int32Value + err = tv.decode(r) + v = tv + case int64ValueType: + var tv Int64Value + err = tv.decode(r) + v = tv + case bytesValueType: 
+ var tv BytesValue + err = tv.decode(r) + v = tv + case stringValueType: + var tv StringValue + err = tv.decode(r) + v = tv + case timestampValueType: + var tv TimestampValue + err = tv.decode(r) + v = tv + case uuidValueType: + var tv UUIDValue + err = tv.decode(r) + v = tv + default: + panic(fmt.Sprintf("unknown value type %d", raw.Type)) + } + + // Error could be EOF, let caller deal with it + return v, err +} + +const maxHeaderNameLen = 255 + +type headerName struct { + Len uint8 + Name [maxHeaderNameLen]byte +} + +func (v headerName) encode(w io.Writer) error { + if err := binary.Write(w, binary.BigEndian, v.Len); err != nil { + return err + } + + _, err := w.Write(v.Name[:v.Len]) + return err +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go new file mode 100644 index 000000000000..d7786f92ce5c --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/header_value.go @@ -0,0 +1,501 @@ +package eventstream + +import ( + "encoding/base64" + "encoding/binary" + "fmt" + "io" + "strconv" + "time" +) + +const maxHeaderValueLen = 1<<15 - 1 // 2^15-1 or 32KB - 1 + +// valueType is the EventStream header value type. 
+type valueType uint8 + +// Header value types +const ( + trueValueType valueType = iota + falseValueType + int8ValueType // Byte + int16ValueType // Short + int32ValueType // Integer + int64ValueType // Long + bytesValueType + stringValueType + timestampValueType + uuidValueType +) + +func (t valueType) String() string { + switch t { + case trueValueType: + return "bool" + case falseValueType: + return "bool" + case int8ValueType: + return "int8" + case int16ValueType: + return "int16" + case int32ValueType: + return "int32" + case int64ValueType: + return "int64" + case bytesValueType: + return "byte_array" + case stringValueType: + return "string" + case timestampValueType: + return "timestamp" + case uuidValueType: + return "uuid" + default: + return fmt.Sprintf("unknown value type %d", uint8(t)) + } +} + +type rawValue struct { + Type valueType + Len uint16 // Only set for variable length slices + Value []byte // byte representation of value, BigEndian encoding. +} + +func (r rawValue) encodeScalar(w io.Writer, v interface{}) error { + return binaryWriteFields(w, binary.BigEndian, + r.Type, + v, + ) +} + +func (r rawValue) encodeFixedSlice(w io.Writer, v []byte) error { + binary.Write(w, binary.BigEndian, r.Type) + + _, err := w.Write(v) + return err +} + +func (r rawValue) encodeBytes(w io.Writer, v []byte) error { + if len(v) > maxHeaderValueLen { + return LengthError{ + Part: "header value", + Want: maxHeaderValueLen, Have: len(v), + Value: v, + } + } + r.Len = uint16(len(v)) + + err := binaryWriteFields(w, binary.BigEndian, + r.Type, + r.Len, + ) + if err != nil { + return err + } + + _, err = w.Write(v) + return err +} + +func (r rawValue) encodeString(w io.Writer, v string) error { + if len(v) > maxHeaderValueLen { + return LengthError{ + Part: "header value", + Want: maxHeaderValueLen, Have: len(v), + Value: v, + } + } + r.Len = uint16(len(v)) + + type stringWriter interface { + WriteString(string) (int, error) + } + + err := binaryWriteFields(w, 
binary.BigEndian, + r.Type, + r.Len, + ) + if err != nil { + return err + } + + if sw, ok := w.(stringWriter); ok { + _, err = sw.WriteString(v) + } else { + _, err = w.Write([]byte(v)) + } + + return err +} + +func decodeFixedBytesValue(r io.Reader, buf []byte) error { + _, err := io.ReadFull(r, buf) + return err +} + +func decodeBytesValue(r io.Reader) ([]byte, error) { + var raw rawValue + var err error + raw.Len, err = decodeUint16(r) + if err != nil { + return nil, err + } + + buf := make([]byte, raw.Len) + _, err = io.ReadFull(r, buf) + if err != nil { + return nil, err + } + + return buf, nil +} + +func decodeStringValue(r io.Reader) (string, error) { + v, err := decodeBytesValue(r) + return string(v), err +} + +// Value represents the abstract header value. +type Value interface { + Get() interface{} + String() string + valueType() valueType + encode(io.Writer) error +} + +// An BoolValue provides eventstream encoding, and representation +// of a Go bool value. +type BoolValue bool + +// Get returns the underlying type +func (v BoolValue) Get() interface{} { + return bool(v) +} + +// valueType returns the EventStream header value type value. +func (v BoolValue) valueType() valueType { + if v { + return trueValueType + } + return falseValueType +} + +func (v BoolValue) String() string { + return strconv.FormatBool(bool(v)) +} + +// encode encodes the BoolValue into an eventstream binary value +// representation. +func (v BoolValue) encode(w io.Writer) error { + return binary.Write(w, binary.BigEndian, v.valueType()) +} + +// An Int8Value provides eventstream encoding, and representation of a Go +// int8 value. +type Int8Value int8 + +// Get returns the underlying value. +func (v Int8Value) Get() interface{} { + return int8(v) +} + +// valueType returns the EventStream header value type value. 
+func (Int8Value) valueType() valueType { + return int8ValueType +} + +func (v Int8Value) String() string { + return fmt.Sprintf("0x%02x", int8(v)) +} + +// encode encodes the Int8Value into an eventstream binary value +// representation. +func (v Int8Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeScalar(w, v) +} + +func (v *Int8Value) decode(r io.Reader) error { + n, err := decodeUint8(r) + if err != nil { + return err + } + + *v = Int8Value(n) + return nil +} + +// An Int16Value provides eventstream encoding, and representation of a Go +// int16 value. +type Int16Value int16 + +// Get returns the underlying value. +func (v Int16Value) Get() interface{} { + return int16(v) +} + +// valueType returns the EventStream header value type value. +func (Int16Value) valueType() valueType { + return int16ValueType +} + +func (v Int16Value) String() string { + return fmt.Sprintf("0x%04x", int16(v)) +} + +// encode encodes the Int16Value into an eventstream binary value +// representation. +func (v Int16Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int16Value) decode(r io.Reader) error { + n, err := decodeUint16(r) + if err != nil { + return err + } + + *v = Int16Value(n) + return nil +} + +// An Int32Value provides eventstream encoding, and representation of a Go +// int32 value. +type Int32Value int32 + +// Get returns the underlying value. +func (v Int32Value) Get() interface{} { + return int32(v) +} + +// valueType returns the EventStream header value type value. +func (Int32Value) valueType() valueType { + return int32ValueType +} + +func (v Int32Value) String() string { + return fmt.Sprintf("0x%08x", int32(v)) +} + +// encode encodes the Int32Value into an eventstream binary value +// representation. 
+func (v Int32Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int32Value) decode(r io.Reader) error { + n, err := decodeUint32(r) + if err != nil { + return err + } + + *v = Int32Value(n) + return nil +} + +// An Int64Value provides eventstream encoding, and representation of a Go +// int64 value. +type Int64Value int64 + +// Get returns the underlying value. +func (v Int64Value) Get() interface{} { + return int64(v) +} + +// valueType returns the EventStream header value type value. +func (Int64Value) valueType() valueType { + return int64ValueType +} + +func (v Int64Value) String() string { + return fmt.Sprintf("0x%016x", int64(v)) +} + +// encode encodes the Int64Value into an eventstream binary value +// representation. +func (v Int64Value) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + return raw.encodeScalar(w, v) +} + +func (v *Int64Value) decode(r io.Reader) error { + n, err := decodeUint64(r) + if err != nil { + return err + } + + *v = Int64Value(n) + return nil +} + +// An BytesValue provides eventstream encoding, and representation of a Go +// byte slice. +type BytesValue []byte + +// Get returns the underlying value. +func (v BytesValue) Get() interface{} { + return []byte(v) +} + +// valueType returns the EventStream header value type value. +func (BytesValue) valueType() valueType { + return bytesValueType +} + +func (v BytesValue) String() string { + return base64.StdEncoding.EncodeToString([]byte(v)) +} + +// encode encodes the BytesValue into an eventstream binary value +// representation. 
+func (v BytesValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeBytes(w, []byte(v)) +} + +func (v *BytesValue) decode(r io.Reader) error { + buf, err := decodeBytesValue(r) + if err != nil { + return err + } + + *v = BytesValue(buf) + return nil +} + +// An StringValue provides eventstream encoding, and representation of a Go +// string. +type StringValue string + +// Get returns the underlying value. +func (v StringValue) Get() interface{} { + return string(v) +} + +// valueType returns the EventStream header value type value. +func (StringValue) valueType() valueType { + return stringValueType +} + +func (v StringValue) String() string { + return string(v) +} + +// encode encodes the StringValue into an eventstream binary value +// representation. +func (v StringValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeString(w, string(v)) +} + +func (v *StringValue) decode(r io.Reader) error { + s, err := decodeStringValue(r) + if err != nil { + return err + } + + *v = StringValue(s) + return nil +} + +// An TimestampValue provides eventstream encoding, and representation of a Go +// timestamp. +type TimestampValue time.Time + +// Get returns the underlying value. +func (v TimestampValue) Get() interface{} { + return time.Time(v) +} + +// valueType returns the EventStream header value type value. +func (TimestampValue) valueType() valueType { + return timestampValueType +} + +func (v TimestampValue) epochMilli() int64 { + nano := time.Time(v).UnixNano() + msec := nano / int64(time.Millisecond) + return msec +} + +func (v TimestampValue) String() string { + msec := v.epochMilli() + return strconv.FormatInt(msec, 10) +} + +// encode encodes the TimestampValue into an eventstream binary value +// representation. 
+func (v TimestampValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + msec := v.epochMilli() + return raw.encodeScalar(w, msec) +} + +func (v *TimestampValue) decode(r io.Reader) error { + n, err := decodeUint64(r) + if err != nil { + return err + } + + *v = TimestampValue(timeFromEpochMilli(int64(n))) + return nil +} + +func timeFromEpochMilli(t int64) time.Time { + secs := t / 1e3 + msec := t % 1e3 + return time.Unix(secs, msec*int64(time.Millisecond)) +} + +// An UUIDValue provides eventstream encoding, and representation of a UUID +// value. +type UUIDValue [16]byte + +// Get returns the underlying value. +func (v UUIDValue) Get() interface{} { + return v[:] +} + +// valueType returns the EventStream header value type value. +func (UUIDValue) valueType() valueType { + return uuidValueType +} + +func (v UUIDValue) String() string { + return fmt.Sprintf(`%X-%X-%X-%X-%X`, v[0:4], v[4:6], v[6:8], v[8:10], v[10:]) +} + +// encode encodes the UUIDValue into an eventstream binary value +// representation. 
+func (v UUIDValue) encode(w io.Writer) error { + raw := rawValue{ + Type: v.valueType(), + } + + return raw.encodeFixedSlice(w, v[:]) +} + +func (v *UUIDValue) decode(r io.Reader) error { + tv := (*v)[:] + return decodeFixedBytesValue(r, tv) +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go new file mode 100644 index 000000000000..2dc012a66e29 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/eventstream/message.go @@ -0,0 +1,103 @@ +package eventstream + +import ( + "bytes" + "encoding/binary" + "hash/crc32" +) + +const preludeLen = 8 +const preludeCRCLen = 4 +const msgCRCLen = 4 +const minMsgLen = preludeLen + preludeCRCLen + msgCRCLen +const maxPayloadLen = 1024 * 1024 * 16 // 16MB +const maxHeadersLen = 1024 * 128 // 128KB +const maxMsgLen = minMsgLen + maxHeadersLen + maxPayloadLen + +var crc32IEEETable = crc32.MakeTable(crc32.IEEE) + +// A Message provides the eventstream message representation. +type Message struct { + Headers Headers + Payload []byte +} + +func (m *Message) rawMessage() (rawMessage, error) { + var raw rawMessage + + if len(m.Headers) > 0 { + var headers bytes.Buffer + if err := encodeHeaders(&headers, m.Headers); err != nil { + return rawMessage{}, err + } + raw.Headers = headers.Bytes() + raw.HeadersLen = uint32(len(raw.Headers)) + } + + raw.Length = raw.HeadersLen + uint32(len(m.Payload)) + minMsgLen + + hash := crc32.New(crc32IEEETable) + binaryWriteFields(hash, binary.BigEndian, raw.Length, raw.HeadersLen) + raw.PreludeCRC = hash.Sum32() + + binaryWriteFields(hash, binary.BigEndian, raw.PreludeCRC) + + if raw.HeadersLen > 0 { + hash.Write(raw.Headers) + } + + // Read payload bytes and update hash for it as well. 
+ if len(m.Payload) > 0 { + raw.Payload = m.Payload + hash.Write(raw.Payload) + } + + raw.CRC = hash.Sum32() + + return raw, nil +} + +type messagePrelude struct { + Length uint32 + HeadersLen uint32 + PreludeCRC uint32 +} + +func (p messagePrelude) PayloadLen() uint32 { + return p.Length - p.HeadersLen - minMsgLen +} + +func (p messagePrelude) ValidateLens() error { + if p.Length == 0 || p.Length > maxMsgLen { + return LengthError{ + Part: "message prelude", + Want: maxMsgLen, + Have: int(p.Length), + } + } + if p.HeadersLen > maxHeadersLen { + return LengthError{ + Part: "message headers", + Want: maxHeadersLen, + Have: int(p.HeadersLen), + } + } + if payloadLen := p.PayloadLen(); payloadLen > maxPayloadLen { + return LengthError{ + Part: "message payload", + Want: maxPayloadLen, + Have: int(payloadLen), + } + } + + return nil +} + +type rawMessage struct { + messagePrelude + + Headers []byte + Payload []byte + + CRC uint32 +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go new file mode 100644 index 000000000000..e21614a12501 --- /dev/null +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/payload.go @@ -0,0 +1,81 @@ +package protocol + +import ( + "io" + "io/ioutil" + "net/http" + + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/client/metadata" + "github.com/aws/aws-sdk-go/aws/request" +) + +// PayloadUnmarshaler provides the interface for unmarshaling a payload's +// reader into a SDK shape. +type PayloadUnmarshaler interface { + UnmarshalPayload(io.Reader, interface{}) error +} + +// HandlerPayloadUnmarshal implements the PayloadUnmarshaler from a +// HandlerList. This provides the support for unmarshaling a payload reader to +// a shape without needing a SDK request first. 
+type HandlerPayloadUnmarshal struct { + Unmarshalers request.HandlerList +} + +// UnmarshalPayload unmarshals the io.Reader payload into the SDK shape using +// the Unmarshalers HandlerList provided. Returns an error if unable +// unmarshaling fails. +func (h HandlerPayloadUnmarshal) UnmarshalPayload(r io.Reader, v interface{}) error { + req := &request.Request{ + HTTPRequest: &http.Request{}, + HTTPResponse: &http.Response{ + StatusCode: 200, + Header: http.Header{}, + Body: ioutil.NopCloser(r), + }, + Data: v, + } + + h.Unmarshalers.Run(req) + + return req.Error +} + +// PayloadMarshaler provides the interface for marshaling a SDK shape into and +// io.Writer. +type PayloadMarshaler interface { + MarshalPayload(io.Writer, interface{}) error +} + +// HandlerPayloadMarshal implements the PayloadMarshaler from a HandlerList. +// This provides support for marshaling a SDK shape into an io.Writer without +// needing a SDK request first. +type HandlerPayloadMarshal struct { + Marshalers request.HandlerList +} + +// MarshalPayload marshals the SDK shape into the io.Writer using the +// Marshalers HandlerList provided. Returns an error if unable if marshal +// fails. 
+func (h HandlerPayloadMarshal) MarshalPayload(w io.Writer, v interface{}) error { + req := request.New( + aws.Config{}, + metadata.ClientInfo{}, + request.Handlers{}, + nil, + &request.Operation{HTTPMethod: "GET"}, + v, + nil, + ) + + h.Marshalers.Run(req) + + if req.Error != nil { + return req.Error + } + + io.Copy(w, req.GetBody()) + + return nil +} diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go index c405288d7423..f761e0b3a5b4 100644 --- a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/build.go @@ -20,8 +20,10 @@ import ( "github.com/aws/aws-sdk-go/private/protocol" ) -// RFC822 returns an RFC822 formatted timestamp for AWS protocols -const RFC822 = "Mon, 2 Jan 2006 15:04:05 GMT" +// RFC1123GMT is a RFC1123 (RFC822) formated timestame. This format is not +// using the standard library's time.RFC1123 due to the desire to always use +// GMT as the timezone. 
+const RFC1123GMT = "Mon, 2 Jan 2006 15:04:05 GMT" // Whether the byte value can be sent without escaping in AWS URLs var noEscape [256]bool @@ -270,7 +272,7 @@ func convertType(v reflect.Value, tag reflect.StructTag) (str string, err error) case float64: str = strconv.FormatFloat(value, 'f', -1, 64) case time.Time: - str = value.UTC().Format(RFC822) + str = value.UTC().Format(RFC1123GMT) case aws.JSONValue: if len(value) == 0 { return "", errValueNotSet diff --git a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go index 823f045eed79..9d4e7626775f 100644 --- a/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go +++ b/vendor/github.com/aws/aws-sdk-go/private/protocol/rest/unmarshal.go @@ -198,7 +198,7 @@ func unmarshalHeader(v reflect.Value, header string, tag reflect.StructTag) erro } v.Set(reflect.ValueOf(&f)) case *time.Time: - t, err := time.Parse(RFC822, header) + t, err := time.Parse(time.RFC1123, header) if err != nil { return err } diff --git a/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go b/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go index 4b0aa76edcd7..0d478662240a 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/cloudwatch/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "monitoring" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "monitoring" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "CloudWatch" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the CloudWatch client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go b/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go index 99d12a66e42f..b48e40e205c1 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go +++ b/vendor/github.com/aws/aws-sdk-go/service/ec2/api.go @@ -2268,11 +2268,7 @@ func (c *EC2) CancelSpotInstanceRequestsRequest(input *CancelSpotInstanceRequest // CancelSpotInstanceRequests API operation for Amazon Elastic Compute Cloud. // -// Cancels one or more Spot Instance requests. Spot Instances are instances -// that Amazon EC2 starts on your behalf when the maximum price that you specify -// exceeds the current Spot price. For more information, see Spot Instance Requests -// (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) in -// the Amazon EC2 User Guide for Linux Instances. +// Cancels one or more Spot Instance requests. // // Canceling a Spot Instance request does not terminate running Spot Instances // associated with the request. @@ -4179,8 +4175,8 @@ func (c *EC2) CreateNetworkInterfacePermissionRequest(input *CreateNetworkInterf // CreateNetworkInterfacePermission API operation for Amazon Elastic Compute Cloud. // -// Grants an AWS authorized partner account permission to attach the specified -// network interface to an instance in their account. +// Grants an AWS-authorized account permission to attach the specified network +// interface to an instance in their account. // // You can grant permission to a single AWS account only, and only one account // at a time. @@ -13675,11 +13671,7 @@ func (c *EC2) DescribeSpotInstanceRequestsRequest(input *DescribeSpotInstanceReq // DescribeSpotInstanceRequests API operation for Amazon Elastic Compute Cloud. 
// -// Describes the Spot Instance requests that belong to your account. Spot Instances -// are instances that Amazon EC2 launches when the Spot price that you specify -// exceeds the current Spot price. For more information, see Spot Instance Requests -// (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) in -// the Amazon EC2 User Guide for Linux Instances. +// Describes the specified Spot Instance requests. // // You can use DescribeSpotInstanceRequests to find a running Spot Instance // by examining the response. If the status of the Spot Instance is fulfilled, @@ -21367,9 +21359,9 @@ func (c *EC2) RequestSpotInstancesRequest(input *RequestSpotInstancesInput) (req // RequestSpotInstances API operation for Amazon Elastic Compute Cloud. // -// Creates a Spot Instance request. Spot Instances are instances that Amazon -// EC2 launches when the maximum price that you specify exceeds the current -// Spot price. For more information, see Spot Instance Requests (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) +// Creates a Spot Instance request. +// +// For more information, see Spot Instance Requests (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-requests.html) // in the Amazon EC2 User Guide for Linux Instances. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions @@ -37615,7 +37607,7 @@ type DescribeInstancesInput struct { // The maximum number of results to return in a single call. To retrieve the // remaining results, make another call with the returned NextToken value. This // value can be between 5 and 1000. You cannot specify this parameter and the - // instance IDs parameter or tag filters in the same call. + // instance IDs parameter in the same call. MaxResults *int64 `locationName:"maxResults" type:"integer"` // The token to request the next page of results. @@ -66458,19 +66450,23 @@ type StateReason struct { // The message for the state change. 
// - // * Server.InsufficientInstanceCapacity: There was insufficient instance - // capacity to satisfy the launch request. + // * Server.InsufficientInstanceCapacity: There was insufficient capacity + // available to satisfy the launch request. // - // * Server.InternalError: An internal error occurred during instance launch, - // resulting in termination. + // * Server.InternalError: An internal error caused the instance to terminate + // during launch. // // * Server.ScheduledStop: The instance was stopped due to a scheduled retirement. // - // * Server.SpotInstanceTermination: A Spot Instance was terminated due to - // an increase in the Spot price. + // * Server.SpotInstanceShutdown: The instance was stopped because the number + // of Spot requests with a maximum price equal to or higher than the Spot + // price exceeded available capacity or because of an increase in the Spot + // price. // - // * Client.InternalError: A client error caused the instance to terminate - // on launch. + // * Server.SpotInstanceTermination: The instance was terminated because + // the number of Spot requests with a maximum price equal to or higher than + // the Spot price exceeded available capacity or because of an increase in + // the Spot price. // // * Client.InstanceInitiatedShutdown: The instance was shut down using the // shutdown -h command from the instance. @@ -66478,14 +66474,17 @@ type StateReason struct { // * Client.InstanceTerminated: The instance was terminated or rebooted during // AMI creation. // + // * Client.InternalError: A client error caused the instance to terminate + // during launch. + // + // * Client.InvalidSnapshot.NotFound: The specified snapshot was not found. + // // * Client.UserInitiatedShutdown: The instance was shut down using the Amazon // EC2 API. // // * Client.VolumeLimitExceeded: The limit on the number of EBS volumes or // total storage was exceeded. Decrease usage or request an increase in your - // limits. 
- // - // * Client.InvalidSnapshot.NotFound: The specified snapshot was not found. + // account limits. Message *string `locationName:"message" type:"string"` } @@ -66969,7 +66968,7 @@ type TagSpecification struct { _ struct{} `type:"structure"` // The type of resource to tag. Currently, the resource types that support tagging - // on creation are instance and volume. + // on creation are instance, snapshot, and volume. ResourceType *string `locationName:"resourceType" type:"string" enum:"ResourceType"` // The tags to apply to the resource. @@ -70694,6 +70693,9 @@ const ( // InstanceTypeI316xlarge is a InstanceType enum value InstanceTypeI316xlarge = "i3.16xlarge" + // InstanceTypeI3Metal is a InstanceType enum value + InstanceTypeI3Metal = "i3.metal" + // InstanceTypeHi14xlarge is a InstanceType enum value InstanceTypeHi14xlarge = "hi1.4xlarge" @@ -70754,6 +70756,24 @@ const ( // InstanceTypeC518xlarge is a InstanceType enum value InstanceTypeC518xlarge = "c5.18xlarge" + // InstanceTypeC5dLarge is a InstanceType enum value + InstanceTypeC5dLarge = "c5d.large" + + // InstanceTypeC5dXlarge is a InstanceType enum value + InstanceTypeC5dXlarge = "c5d.xlarge" + + // InstanceTypeC5d2xlarge is a InstanceType enum value + InstanceTypeC5d2xlarge = "c5d.2xlarge" + + // InstanceTypeC5d4xlarge is a InstanceType enum value + InstanceTypeC5d4xlarge = "c5d.4xlarge" + + // InstanceTypeC5d9xlarge is a InstanceType enum value + InstanceTypeC5d9xlarge = "c5d.9xlarge" + + // InstanceTypeC5d18xlarge is a InstanceType enum value + InstanceTypeC5d18xlarge = "c5d.18xlarge" + // InstanceTypeCc14xlarge is a InstanceType enum value InstanceTypeCc14xlarge = "cc1.4xlarge" @@ -70832,6 +70852,24 @@ const ( // InstanceTypeM524xlarge is a InstanceType enum value InstanceTypeM524xlarge = "m5.24xlarge" + // InstanceTypeM5dLarge is a InstanceType enum value + InstanceTypeM5dLarge = "m5d.large" + + // InstanceTypeM5dXlarge is a InstanceType enum value + InstanceTypeM5dXlarge = "m5d.xlarge" + + // 
InstanceTypeM5d2xlarge is a InstanceType enum value + InstanceTypeM5d2xlarge = "m5d.2xlarge" + + // InstanceTypeM5d4xlarge is a InstanceType enum value + InstanceTypeM5d4xlarge = "m5d.4xlarge" + + // InstanceTypeM5d12xlarge is a InstanceType enum value + InstanceTypeM5d12xlarge = "m5d.12xlarge" + + // InstanceTypeM5d24xlarge is a InstanceType enum value + InstanceTypeM5d24xlarge = "m5d.24xlarge" + // InstanceTypeH12xlarge is a InstanceType enum value InstanceTypeH12xlarge = "h1.2xlarge" diff --git a/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go b/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go index ba4433d388eb..6acbc43fe3de 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/ec2/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "ec2" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "ec2" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "EC2" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the EC2 client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/aws/aws-sdk-go/service/s3/api.go b/vendor/github.com/aws/aws-sdk-go/service/s3/api.go index a27823fdfb51..07fc06af1f97 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/s3/api.go +++ b/vendor/github.com/aws/aws-sdk-go/service/s3/api.go @@ -3,14 +3,21 @@ package s3 import ( + "bytes" "fmt" "io" + "sync" + "sync/atomic" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/awsutil" + "github.com/aws/aws-sdk-go/aws/client" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/private/protocol" + "github.com/aws/aws-sdk-go/private/protocol/eventstream" + "github.com/aws/aws-sdk-go/private/protocol/eventstream/eventstreamapi" + "github.com/aws/aws-sdk-go/private/protocol/rest" "github.com/aws/aws-sdk-go/private/protocol/restxml" ) @@ -6017,6 +6024,88 @@ func (c *S3) RestoreObjectWithContext(ctx aws.Context, input *RestoreObjectInput return out, req.Send() } +const opSelectObjectContent = "SelectObjectContent" + +// SelectObjectContentRequest generates a "aws/request.Request" representing the +// client's request for the SelectObjectContent operation. The "output" return +// value will be populated with the request's response once the request completes +// successfuly. +// +// Use "Send" method on the returned Request to send the API call to the service. +// the "output" return value is not valid until after Send returns without error. +// +// See SelectObjectContent for more information on using the SelectObjectContent +// API call, and error handling. +// +// This method is useful when you want to inject custom logic or configuration +// into the SDK's request lifecycle. Such as custom headers, or retry logic. 
+// +// +// // Example sending a request using the SelectObjectContentRequest method. +// req, resp := client.SelectObjectContentRequest(params) +// +// err := req.Send() +// if err == nil { // resp is now filled +// fmt.Println(resp) +// } +// +// See also, https://docs.aws.amazon.com/goto/WebAPI/s3-2006-03-01/SelectObjectContent +func (c *S3) SelectObjectContentRequest(input *SelectObjectContentInput) (req *request.Request, output *SelectObjectContentOutput) { + op := &request.Operation{ + Name: opSelectObjectContent, + HTTPMethod: "POST", + HTTPPath: "/{Bucket}/{Key+}?select&select-type=2", + } + + if input == nil { + input = &SelectObjectContentInput{} + } + + output = &SelectObjectContentOutput{} + req = c.newRequest(op, input, output) + req.Handlers.Send.Swap(client.LogHTTPResponseHandler.Name, client.LogHTTPResponseHeaderHandler) + req.Handlers.Unmarshal.Swap(restxml.UnmarshalHandler.Name, rest.UnmarshalHandler) + req.Handlers.Unmarshal.PushBack(output.runEventStreamLoop) + return +} + +// SelectObjectContent API operation for Amazon Simple Storage Service. +// +// This operation filters the contents of an Amazon S3 object based on a simple +// Structured Query Language (SQL) statement. In the request, along with the +// SQL expression, you must also specify a data serialization format (JSON or +// CSV) of the object. Amazon S3 uses this to parse object data into records, +// and returns only records that match the specified SQL expression. You must +// also specify the data serialization format for the response. +// +// Returns awserr.Error for service API and SDK errors. Use runtime type assertions +// with awserr.Error's Code and Message methods to get detailed information about +// the error. +// +// See the AWS API reference guide for Amazon Simple Storage Service's +// API operation SelectObjectContent for usage and error information. 
+// See also, https://docs.aws.amazon.com/goto/WebAPI/s3-2006-03-01/SelectObjectContent +func (c *S3) SelectObjectContent(input *SelectObjectContentInput) (*SelectObjectContentOutput, error) { + req, out := c.SelectObjectContentRequest(input) + return out, req.Send() +} + +// SelectObjectContentWithContext is the same as SelectObjectContent with the addition of +// the ability to pass a context and additional request options. +// +// See SelectObjectContent for details on how to use this API operation. +// +// The context must be non-nil and will be used for request cancellation. If +// the context is nil a panic will occur. In the future the SDK may create +// sub-contexts for http.Requests. See https://golang.org/pkg/context/ +// for more information on using Contexts. +func (c *S3) SelectObjectContentWithContext(ctx aws.Context, input *SelectObjectContentInput, opts ...request.Option) (*SelectObjectContentOutput, error) { + req, out := c.SelectObjectContentRequest(input) + req.SetContext(ctx) + req.ApplyOptions(opts...) + return out, req.Send() +} + const opUploadPart = "UploadPart" // UploadPartRequest generates a "aws/request.Request" representing the @@ -7474,6 +7563,32 @@ func (s *Condition) SetKeyPrefixEquals(v string) *Condition { return s } +type ContinuationEvent struct { + _ struct{} `type:"structure"` +} + +// String returns the string representation +func (s ContinuationEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s ContinuationEvent) GoString() string { + return s.String() +} + +// The ContinuationEvent is and event in the SelectObjectContentEventStream group of events. +func (s *ContinuationEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the ContinuationEvent value. +// This method is only used internally within the SDK's EventStream handling. 
+func (s *ContinuationEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + return nil +} + type CopyObjectInput struct { _ struct{} `type:"structure"` @@ -9919,6 +10034,32 @@ func (s *EncryptionConfiguration) SetReplicaKmsKeyID(v string) *EncryptionConfig return s } +type EndEvent struct { + _ struct{} `type:"structure"` +} + +// String returns the string representation +func (s EndEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s EndEvent) GoString() string { + return s.String() +} + +// The EndEvent is and event in the SelectObjectContentEventStream group of events. +func (s *EndEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the EndEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *EndEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + return nil +} + type Error struct { _ struct{} `type:"structure"` @@ -16380,6 +16521,87 @@ func (s *Part) SetSize(v int64) *Part { return s } +type Progress struct { + _ struct{} `type:"structure"` + + // Current number of uncompressed object bytes processed. + BytesProcessed *int64 `type:"long"` + + // Current number of bytes of records payload data returned. + BytesReturned *int64 `type:"long"` + + // Current number of object bytes scanned. + BytesScanned *int64 `type:"long"` +} + +// String returns the string representation +func (s Progress) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s Progress) GoString() string { + return s.String() +} + +// SetBytesProcessed sets the BytesProcessed field's value. +func (s *Progress) SetBytesProcessed(v int64) *Progress { + s.BytesProcessed = &v + return s +} + +// SetBytesReturned sets the BytesReturned field's value. 
+func (s *Progress) SetBytesReturned(v int64) *Progress { + s.BytesReturned = &v + return s +} + +// SetBytesScanned sets the BytesScanned field's value. +func (s *Progress) SetBytesScanned(v int64) *Progress { + s.BytesScanned = &v + return s +} + +type ProgressEvent struct { + _ struct{} `type:"structure" payload:"Details"` + + // The Progress event details. + Details *Progress `locationName:"Details" type:"structure"` +} + +// String returns the string representation +func (s ProgressEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s ProgressEvent) GoString() string { + return s.String() +} + +// SetDetails sets the Details field's value. +func (s *ProgressEvent) SetDetails(v *Progress) *ProgressEvent { + s.Details = v + return s +} + +// The ProgressEvent is and event in the SelectObjectContentEventStream group of events. +func (s *ProgressEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the ProgressEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *ProgressEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + if err := payloadUnmarshaler.UnmarshalPayload( + bytes.NewReader(msg.Payload), s, + ); err != nil { + return fmt.Errorf("failed to unmarshal payload, %v", err) + } + return nil +} + type PutBucketAccelerateConfigurationInput struct { _ struct{} `type:"structure" payload:"AccelerateConfiguration"` @@ -18622,6 +18844,45 @@ func (s *QueueConfigurationDeprecated) SetQueue(v string) *QueueConfigurationDep return s } +type RecordsEvent struct { + _ struct{} `type:"structure" payload:"Payload"` + + // The byte array of partial, one or more result records. + // + // Payload is automatically base64 encoded/decoded by the SDK. 
+ Payload []byte `type:"blob"` +} + +// String returns the string representation +func (s RecordsEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s RecordsEvent) GoString() string { + return s.String() +} + +// SetPayload sets the Payload field's value. +func (s *RecordsEvent) SetPayload(v []byte) *RecordsEvent { + s.Payload = v + return s +} + +// The RecordsEvent is and event in the SelectObjectContentEventStream group of events. +func (s *RecordsEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the RecordsEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *RecordsEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + s.Payload = make([]byte, len(msg.Payload)) + copy(s.Payload, msg.Payload) + return nil +} + type Redirect struct { _ struct{} `type:"structure"` @@ -18939,6 +19200,30 @@ func (s *RequestPaymentConfiguration) SetPayer(v string) *RequestPaymentConfigur return s } +type RequestProgress struct { + _ struct{} `type:"structure"` + + // Specifies whether periodic QueryProgress frames should be sent. Valid values: + // TRUE, FALSE. Default value: FALSE. + Enabled *bool `type:"boolean"` +} + +// String returns the string representation +func (s RequestProgress) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s RequestProgress) GoString() string { + return s.String() +} + +// SetEnabled sets the Enabled field's value. 
+func (s *RequestProgress) SetEnabled(v bool) *RequestProgress { + s.Enabled = &v + return s +} + type RestoreObjectInput struct { _ struct{} `type:"structure" payload:"RestoreRequest"` @@ -19392,6 +19677,436 @@ func (s SSES3) GoString() string { return s.String() } +// SelectObjectContentEventStream provides handling of EventStreams for +// the SelectObjectContent API. +// +// Use this type to receive SelectObjectContentEventStream events. The events +// can be read from the Events channel member. +// +// The events that can be received are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStream struct { + // Reader is the EventStream reader for the SelectObjectContentEventStream + // events. This value is automatically set by the SDK when the API call is made + // Use this member when unit testing your code with the SDK to mock out the + // EventStream Reader. + // + // Must not be nil. + Reader SelectObjectContentEventStreamReader + + // StreamCloser is the io.Closer for the EventStream connection. For HTTP + // EventStream this is the response Body. The stream will be closed when + // the Close method of the EventStream is called. + StreamCloser io.Closer +} + +// Close closes the EventStream. This will also cause the Events channel to be +// closed. You can use the closing of the Events channel to terminate your +// application's read from the API's EventStream. +// +// Will close the underlying EventStream reader. For EventStream over HTTP +// connection this will also close the HTTP connection. +// +// Close must be called when done using the EventStream API. Not calling Close +// may result in resource leaks. +func (es *SelectObjectContentEventStream) Close() (err error) { + es.Reader.Close() + return es.Err() +} + +// Err returns any error that occurred while reading EventStream Events from +// the service API's response. Returns nil if there were no errors. 
+func (es *SelectObjectContentEventStream) Err() error { + if err := es.Reader.Err(); err != nil { + return err + } + es.StreamCloser.Close() + + return nil +} + +// Events returns a channel to read EventStream Events from the +// SelectObjectContent API. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +func (es *SelectObjectContentEventStream) Events() <-chan SelectObjectContentEventStreamEvent { + return es.Reader.Events() +} + +// SelectObjectContentEventStreamEvent groups together all EventStream +// events read from the SelectObjectContent API. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStreamEvent interface { + eventSelectObjectContentEventStream() +} + +// SelectObjectContentEventStreamReader provides the interface for reading EventStream +// Events from the SelectObjectContent API. The +// default implementation for this interface will be SelectObjectContentEventStream. +// +// The reader's Close method must allow multiple concurrent calls. +// +// These events are: +// +// * ContinuationEvent +// * EndEvent +// * ProgressEvent +// * RecordsEvent +// * StatsEvent +type SelectObjectContentEventStreamReader interface { + // Returns a channel of events as they are read from the event stream. + Events() <-chan SelectObjectContentEventStreamEvent + + // Close will close the underlying event stream reader. For event stream over + // HTTP this will also close the HTTP connection. + Close() error + + // Returns any error that has occured while reading from the event stream. 
+ Err() error +} + +type readSelectObjectContentEventStream struct { + eventReader *eventstreamapi.EventReader + stream chan SelectObjectContentEventStreamEvent + errVal atomic.Value + + done chan struct{} + closeOnce sync.Once +} + +func newReadSelectObjectContentEventStream( + reader io.ReadCloser, + unmarshalers request.HandlerList, + logger aws.Logger, + logLevel aws.LogLevelType, +) *readSelectObjectContentEventStream { + r := &readSelectObjectContentEventStream{ + stream: make(chan SelectObjectContentEventStreamEvent), + done: make(chan struct{}), + } + + r.eventReader = eventstreamapi.NewEventReader( + reader, + protocol.HandlerPayloadUnmarshal{ + Unmarshalers: unmarshalers, + }, + r.unmarshalerForEventType, + ) + r.eventReader.UseLogger(logger, logLevel) + + return r +} + +// Close will close the underlying event stream reader. For EventStream over +// HTTP this will also close the HTTP connection. +func (r *readSelectObjectContentEventStream) Close() error { + r.closeOnce.Do(r.safeClose) + + return r.Err() +} + +func (r *readSelectObjectContentEventStream) safeClose() { + close(r.done) + err := r.eventReader.Close() + if err != nil { + r.errVal.Store(err) + } +} + +func (r *readSelectObjectContentEventStream) Err() error { + if v := r.errVal.Load(); v != nil { + return v.(error) + } + + return nil +} + +func (r *readSelectObjectContentEventStream) Events() <-chan SelectObjectContentEventStreamEvent { + return r.stream +} + +func (r *readSelectObjectContentEventStream) readEventStream() { + defer close(r.stream) + + for { + event, err := r.eventReader.ReadEvent() + if err != nil { + if err == io.EOF { + return + } + select { + case <-r.done: + // If closed already ignore the error + return + default: + } + r.errVal.Store(err) + return + } + + select { + case r.stream <- event.(SelectObjectContentEventStreamEvent): + case <-r.done: + return + } + } +} + +func (r *readSelectObjectContentEventStream) unmarshalerForEventType( + eventType string, +) 
(eventstreamapi.Unmarshaler, error) { + switch eventType { + case "Cont": + return &ContinuationEvent{}, nil + + case "End": + return &EndEvent{}, nil + + case "Progress": + return &ProgressEvent{}, nil + + case "Records": + return &RecordsEvent{}, nil + + case "Stats": + return &StatsEvent{}, nil + default: + return nil, fmt.Errorf( + "unknown event type name, %s, for SelectObjectContentEventStream", eventType) + } +} + +// Request to filter the contents of an Amazon S3 object based on a simple Structured +// Query Language (SQL) statement. In the request, along with the SQL expression, +// you must also specify a data serialization format (JSON or CSV) of the object. +// Amazon S3 uses this to parse object data into records, and returns only records +// that match the specified SQL expression. You must also specify the data serialization +// format for the response. For more information, go to S3Select API Documentation +// (https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectSELECTContent.html) +type SelectObjectContentInput struct { + _ struct{} `locationName:"SelectObjectContentRequest" type:"structure" xmlURI:"http://s3.amazonaws.com/doc/2006-03-01/"` + + // The S3 Bucket. + // + // Bucket is a required field + Bucket *string `location:"uri" locationName:"Bucket" type:"string" required:"true"` + + // The expression that is used to query the object. + // + // Expression is a required field + Expression *string `type:"string" required:"true"` + + // The type of the provided expression (e.g., SQL). + // + // ExpressionType is a required field + ExpressionType *string `type:"string" required:"true" enum:"ExpressionType"` + + // Describes the format of the data in the object that is being queried. + // + // InputSerialization is a required field + InputSerialization *InputSerialization `type:"structure" required:"true"` + + // The Object Key. 
+ // + // Key is a required field + Key *string `location:"uri" locationName:"Key" min:"1" type:"string" required:"true"` + + // Describes the format of the data that you want Amazon S3 to return in response. + // + // OutputSerialization is a required field + OutputSerialization *OutputSerialization `type:"structure" required:"true"` + + // Specifies if periodic request progress information should be enabled. + RequestProgress *RequestProgress `type:"structure"` + + // The SSE Algorithm used to encrypt the object. For more information, go to + // Server-Side Encryption (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerAlgorithm *string `location:"header" locationName:"x-amz-server-side-encryption-customer-algorithm" type:"string"` + + // The SSE Customer Key. For more information, go to Server-Side Encryption + // (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerKey *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key" type:"string"` + + // The SSE Customer Key MD5. For more information, go to Server-Side Encryption + // (Using Customer-Provided Encryption Keys (https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html) + SSECustomerKeyMD5 *string `location:"header" locationName:"x-amz-server-side-encryption-customer-key-MD5" type:"string"` +} + +// String returns the string representation +func (s SelectObjectContentInput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s SelectObjectContentInput) GoString() string { + return s.String() +} + +// Validate inspects the fields of the type to determine if they are valid. 
+func (s *SelectObjectContentInput) Validate() error { + invalidParams := request.ErrInvalidParams{Context: "SelectObjectContentInput"} + if s.Bucket == nil { + invalidParams.Add(request.NewErrParamRequired("Bucket")) + } + if s.Expression == nil { + invalidParams.Add(request.NewErrParamRequired("Expression")) + } + if s.ExpressionType == nil { + invalidParams.Add(request.NewErrParamRequired("ExpressionType")) + } + if s.InputSerialization == nil { + invalidParams.Add(request.NewErrParamRequired("InputSerialization")) + } + if s.Key == nil { + invalidParams.Add(request.NewErrParamRequired("Key")) + } + if s.Key != nil && len(*s.Key) < 1 { + invalidParams.Add(request.NewErrParamMinLen("Key", 1)) + } + if s.OutputSerialization == nil { + invalidParams.Add(request.NewErrParamRequired("OutputSerialization")) + } + + if invalidParams.Len() > 0 { + return invalidParams + } + return nil +} + +// SetBucket sets the Bucket field's value. +func (s *SelectObjectContentInput) SetBucket(v string) *SelectObjectContentInput { + s.Bucket = &v + return s +} + +func (s *SelectObjectContentInput) getBucket() (v string) { + if s.Bucket == nil { + return v + } + return *s.Bucket +} + +// SetExpression sets the Expression field's value. +func (s *SelectObjectContentInput) SetExpression(v string) *SelectObjectContentInput { + s.Expression = &v + return s +} + +// SetExpressionType sets the ExpressionType field's value. +func (s *SelectObjectContentInput) SetExpressionType(v string) *SelectObjectContentInput { + s.ExpressionType = &v + return s +} + +// SetInputSerialization sets the InputSerialization field's value. +func (s *SelectObjectContentInput) SetInputSerialization(v *InputSerialization) *SelectObjectContentInput { + s.InputSerialization = v + return s +} + +// SetKey sets the Key field's value. +func (s *SelectObjectContentInput) SetKey(v string) *SelectObjectContentInput { + s.Key = &v + return s +} + +// SetOutputSerialization sets the OutputSerialization field's value. 
+func (s *SelectObjectContentInput) SetOutputSerialization(v *OutputSerialization) *SelectObjectContentInput { + s.OutputSerialization = v + return s +} + +// SetRequestProgress sets the RequestProgress field's value. +func (s *SelectObjectContentInput) SetRequestProgress(v *RequestProgress) *SelectObjectContentInput { + s.RequestProgress = v + return s +} + +// SetSSECustomerAlgorithm sets the SSECustomerAlgorithm field's value. +func (s *SelectObjectContentInput) SetSSECustomerAlgorithm(v string) *SelectObjectContentInput { + s.SSECustomerAlgorithm = &v + return s +} + +// SetSSECustomerKey sets the SSECustomerKey field's value. +func (s *SelectObjectContentInput) SetSSECustomerKey(v string) *SelectObjectContentInput { + s.SSECustomerKey = &v + return s +} + +func (s *SelectObjectContentInput) getSSECustomerKey() (v string) { + if s.SSECustomerKey == nil { + return v + } + return *s.SSECustomerKey +} + +// SetSSECustomerKeyMD5 sets the SSECustomerKeyMD5 field's value. +func (s *SelectObjectContentInput) SetSSECustomerKeyMD5(v string) *SelectObjectContentInput { + s.SSECustomerKeyMD5 = &v + return s +} + +type SelectObjectContentOutput struct { + _ struct{} `type:"structure" payload:"Payload"` + + // Use EventStream to use the API's stream. + EventStream *SelectObjectContentEventStream `type:"structure"` +} + +// String returns the string representation +func (s SelectObjectContentOutput) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s SelectObjectContentOutput) GoString() string { + return s.String() +} + +// SetEventStream sets the EventStream field's value. 
+func (s *SelectObjectContentOutput) SetEventStream(v *SelectObjectContentEventStream) *SelectObjectContentOutput { + s.EventStream = v + return s +} + +func (s *SelectObjectContentOutput) runEventStreamLoop(r *request.Request) { + if r.Error != nil { + return + } + reader := newReadSelectObjectContentEventStream( + r.HTTPResponse.Body, + r.Handlers.UnmarshalStream, + r.Config.Logger, + r.Config.LogLevel.Value(), + ) + go reader.readEventStream() + + eventStream := &SelectObjectContentEventStream{ + StreamCloser: r.HTTPResponse.Body, + Reader: reader, + } + s.EventStream = eventStream +} + // Describes the parameters for Select job types. type SelectParameters struct { _ struct{} `type:"structure"` @@ -19696,6 +20411,87 @@ func (s *SseKmsEncryptedObjects) SetStatus(v string) *SseKmsEncryptedObjects { return s } +type Stats struct { + _ struct{} `type:"structure"` + + // Total number of uncompressed object bytes processed. + BytesProcessed *int64 `type:"long"` + + // Total number of bytes of records payload data returned. + BytesReturned *int64 `type:"long"` + + // Total number of object bytes scanned. + BytesScanned *int64 `type:"long"` +} + +// String returns the string representation +func (s Stats) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s Stats) GoString() string { + return s.String() +} + +// SetBytesProcessed sets the BytesProcessed field's value. +func (s *Stats) SetBytesProcessed(v int64) *Stats { + s.BytesProcessed = &v + return s +} + +// SetBytesReturned sets the BytesReturned field's value. +func (s *Stats) SetBytesReturned(v int64) *Stats { + s.BytesReturned = &v + return s +} + +// SetBytesScanned sets the BytesScanned field's value. +func (s *Stats) SetBytesScanned(v int64) *Stats { + s.BytesScanned = &v + return s +} + +type StatsEvent struct { + _ struct{} `type:"structure" payload:"Details"` + + // The Stats event details. 
+ Details *Stats `locationName:"Details" type:"structure"` +} + +// String returns the string representation +func (s StatsEvent) String() string { + return awsutil.Prettify(s) +} + +// GoString returns the string representation +func (s StatsEvent) GoString() string { + return s.String() +} + +// SetDetails sets the Details field's value. +func (s *StatsEvent) SetDetails(v *Stats) *StatsEvent { + s.Details = v + return s +} + +// The StatsEvent is and event in the SelectObjectContentEventStream group of events. +func (s *StatsEvent) eventSelectObjectContentEventStream() {} + +// UnmarshalEvent unmarshals the EventStream Message into the StatsEvent value. +// This method is only used internally within the SDK's EventStream handling. +func (s *StatsEvent) UnmarshalEvent( + payloadUnmarshaler protocol.PayloadUnmarshaler, + msg eventstream.Message, +) error { + if err := payloadUnmarshaler.UnmarshalPayload( + bytes.NewReader(msg.Payload), s, + ); err != nil { + return fmt.Errorf("failed to unmarshal payload, %v", err) + } + return nil +} + type StorageClassAnalysis struct { _ struct{} `type:"structure"` diff --git a/vendor/github.com/aws/aws-sdk-go/service/s3/service.go b/vendor/github.com/aws/aws-sdk-go/service/s3/service.go index 614e477d3bb7..20de53f29d79 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/s3/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/s3/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "s3" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "s3" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "S3" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the S3 client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, @@ -71,6 +73,8 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio svc.Handlers.UnmarshalMeta.PushBackNamed(restxml.UnmarshalMetaHandler) svc.Handlers.UnmarshalError.PushBackNamed(restxml.UnmarshalErrorHandler) + svc.Handlers.UnmarshalStream.PushBackNamed(restxml.UnmarshalHandler) + // Run custom client initialization if present if initClient != nil { initClient(svc.Client) diff --git a/vendor/github.com/aws/aws-sdk-go/service/sts/service.go b/vendor/github.com/aws/aws-sdk-go/service/sts/service.go index 1ee5839e0462..185c914d1b30 100644 --- a/vendor/github.com/aws/aws-sdk-go/service/sts/service.go +++ b/vendor/github.com/aws/aws-sdk-go/service/sts/service.go @@ -29,8 +29,9 @@ var initRequest func(*request.Request) // Service information constants const ( - ServiceName = "sts" // Service endpoint prefix API calls made to. - EndpointsID = ServiceName // Service ID for Regions and Endpoints metadata. + ServiceName = "sts" // Name of service. + EndpointsID = ServiceName // ID to lookup a service endpoint with. + ServiceID = "STS" // ServiceID is a unique identifer of a specific service. ) // New creates a new instance of the STS client with a session. 
@@ -55,6 +56,7 @@ func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegio cfg, metadata.ClientInfo{ ServiceName: ServiceName, + ServiceID: ServiceID, SigningName: signingName, SigningRegion: signingRegion, Endpoint: endpoint, diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE new file mode 100644 index 000000000000..c35c17af9808 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2015 Dmitri Shuralyov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/main.go b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go new file mode 100644 index 000000000000..6a77d1243173 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go @@ -0,0 +1,29 @@ +// Package sanitized_anchor_name provides a func to create sanitized anchor names. 
+// +// Its logic can be reused by multiple packages to create interoperable anchor names +// and links to those anchors. +// +// At this time, it does not try to ensure that generated anchor names +// are unique, that responsibility falls on the caller. +package sanitized_anchor_name // import "github.com/shurcooL/sanitized_anchor_name" + +import "unicode" + +// Create returns a sanitized anchor name for the given text. +func Create(text string) string { + var anchorName []rune + var futureDash = false + for _, r := range text { + switch { + case unicode.IsLetter(r) || unicode.IsNumber(r): + if futureDash && len(anchorName) > 0 { + anchorName = append(anchorName, '-') + } + futureDash = false + anchorName = append(anchorName, unicode.ToLower(r)) + default: + futureDash = true + } + } + return string(anchorName) +} From 40ed235b3ba29de41081ba59110824caa0671801 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Mon, 16 Apr 2018 16:50:13 +0900 Subject: [PATCH 099/263] support GetMetricData --- pkg/metrics/metrics.go | 8 + pkg/tsdb/cloudwatch/cloudwatch.go | 213 +++++++++++++++--- pkg/tsdb/cloudwatch/types.go | 4 + .../datasource/cloudwatch/datasource.ts | 20 +- .../cloudwatch/partials/query.parameter.html | 41 ++-- .../cloudwatch/query_parameter_ctrl.ts | 3 + 6 files changed, 243 insertions(+), 46 deletions(-) diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index 4dd84c121517..a8d9f7308fa3 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -44,6 +44,7 @@ var ( M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter + M_Aws_CloudWatch_GetMetricData prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers @@ -218,6 +219,12 @@ func init() { Namespace: exporterName, }) + M_Aws_CloudWatch_GetMetricData = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "aws_cloudwatch_get_metric_data_total", + Help: "counter for getting 
metric data time series from aws", + Namespace: exporterName, + }) + M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", @@ -307,6 +314,7 @@ func initMetricVars() { M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, + M_Aws_CloudWatch_GetMetricData, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 8af97575ae9e..54634bc06141 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -14,6 +14,7 @@ import ( "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/tsdb" + "golang.org/x/sync/errgroup" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/request" @@ -88,48 +89,63 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo Results: make(map[string]*tsdb.QueryResult), } - errCh := make(chan error, 1) - resCh := make(chan *tsdb.QueryResult, 1) + eg, ectx := errgroup.WithContext(ctx) - currentlyExecuting := 0 + getMetricDataQueries := make(map[string]map[string]*CloudWatchQuery) for i, model := range queryContext.Queries { queryType := model.Model.Get("type").MustString() if queryType != "timeSeriesQuery" && queryType != "" { continue } - currentlyExecuting++ - go func(refId string, index int) { - queryRes, err := e.executeQuery(ctx, queryContext.Queries[index].Model, queryContext) - currentlyExecuting-- + + query, err := parseQuery(queryContext.Queries[i].Model) + if err != nil { + return nil, err + } + query.RefId = queryContext.Queries[i].RefId + + if query.Id != "" { + if _, ok := getMetricDataQueries[query.Region]; !ok { + getMetricDataQueries[query.Region] = make(map[string]*CloudWatchQuery) + } + getMetricDataQueries[query.Region][query.Id] = query + continue + } 
+ + eg.Go(func() error { + queryRes, err := e.executeQuery(ectx, query, queryContext) if err != nil { - errCh <- err - } else { - queryRes.RefId = refId - resCh <- queryRes + return err } - }(model.RefId, i) + result.Results[queryRes.RefId] = queryRes + return nil + }) } - for currentlyExecuting != 0 { - select { - case res := <-resCh: - result.Results[res.RefId] = res - case err := <-errCh: - return result, err - case <-ctx.Done(): - return result, ctx.Err() + if len(getMetricDataQueries) > 0 { + for region, getMetricDataQuery := range getMetricDataQueries { + q := getMetricDataQuery + eg.Go(func() error { + queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, queryContext) + if err != nil { + return err + } + for _, queryRes := range queryResponses { + result.Results[queryRes.RefId] = queryRes + } + return nil + }) } } - return result, nil -} - -func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) { - query, err := parseQuery(parameters) - if err != nil { + if err := eg.Wait(); err != nil { return nil, err } + return result, nil +} + +func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatchQuery, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) { client, err := e.getClient(query.Region) if err != nil { return nil, err @@ -201,6 +217,139 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simpl return queryRes, nil } +func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, region string, queries map[string]*CloudWatchQuery, queryContext *tsdb.TsdbQuery) ([]*tsdb.QueryResult, error) { + queryResponses := make([]*tsdb.QueryResult, 0) + + // validate query + for _, query := range queries { + if !(len(query.Statistics) == 1 && len(query.ExtendedStatistics) == 0) && + !(len(query.Statistics) == 0 && len(query.ExtendedStatistics) == 1) { + return queryResponses, errors.New("Statistics 
count should be 1") + } + } + + client, err := e.getClient(region) + if err != nil { + return queryResponses, err + } + + startTime, err := queryContext.TimeRange.ParseFrom() + if err != nil { + return queryResponses, err + } + + endTime, err := queryContext.TimeRange.ParseTo() + if err != nil { + return queryResponses, err + } + + params := &cloudwatch.GetMetricDataInput{ + StartTime: aws.Time(startTime), + EndTime: aws.Time(endTime), + ScanBy: aws.String("TimestampAscending"), + } + for _, query := range queries { + // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600 + if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) { + return nil, errors.New("too long query period") + } + + mdq := &cloudwatch.MetricDataQuery{ + Id: aws.String(query.Id), + ReturnData: aws.Bool(query.ReturnData), + } + if query.Expression != "" { + mdq.Expression = aws.String(query.Expression) + } else { + mdq.MetricStat = &cloudwatch.MetricStat{ + Metric: &cloudwatch.Metric{ + Namespace: aws.String(query.Namespace), + MetricName: aws.String(query.MetricName), + }, + Period: aws.Int64(int64(query.Period)), + } + for _, d := range query.Dimensions { + mdq.MetricStat.Metric.Dimensions = append(mdq.MetricStat.Metric.Dimensions, + &cloudwatch.Dimension{ + Name: d.Name, + Value: d.Value, + }) + } + if len(query.Statistics) == 1 { + mdq.MetricStat.Stat = query.Statistics[0] + } else { + mdq.MetricStat.Stat = query.ExtendedStatistics[0] + } + } + params.MetricDataQueries = append(params.MetricDataQueries, mdq) + } + + nextToken := "" + mdr := make(map[string]*cloudwatch.MetricDataResult) + for { + if nextToken != "" { + params.NextToken = aws.String(nextToken) + } + resp, err := client.GetMetricDataWithContext(ctx, params) + if err != nil { + return queryResponses, err + } + metrics.M_Aws_CloudWatch_GetMetricData.Add(float64(len(params.MetricDataQueries))) + + for _, r := range resp.MetricDataResults { + if _, ok := mdr[*r.Id]; !ok { 
+ mdr[*r.Id] = r + } else { + mdr[*r.Id].Timestamps = append(mdr[*r.Id].Timestamps, r.Timestamps...) + mdr[*r.Id].Values = append(mdr[*r.Id].Values, r.Values...) + } + } + + if resp.NextToken == nil || *resp.NextToken == "" { + break + } + nextToken = *resp.NextToken + } + + for i, r := range mdr { + if *r.StatusCode != "Complete" { + return queryResponses, fmt.Errorf("Part of query is failed: %s", *r.StatusCode) + } + + queryRes := tsdb.NewQueryResult() + queryRes.RefId = queries[i].RefId + query := queries[*r.Id] + + series := tsdb.TimeSeries{ + Tags: map[string]string{}, + Points: make([]tsdb.TimePoint, 0), + } + for _, d := range query.Dimensions { + series.Tags[*d.Name] = *d.Value + } + s := "" + if len(query.Statistics) == 1 { + s = *query.Statistics[0] + } else { + s = *query.ExtendedStatistics[0] + } + series.Name = formatAlias(query, s, series.Tags) + + for j, t := range r.Timestamps { + expectedTimestamp := r.Timestamps[j].Add(time.Duration(query.Period) * time.Second) + if j > 0 && expectedTimestamp.Before(*t) { + series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), float64(expectedTimestamp.Unix()*1000))) + } + series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(*r.Values[j]), float64((*t).Unix())*1000)) + } + + queryRes.Series = append(queryRes.Series, &series) + queryResponses = append(queryResponses, queryRes) + } + + return queryResponses, nil +} + func parseDimensions(model *simplejson.Json) ([]*cloudwatch.Dimension, error) { var result []*cloudwatch.Dimension @@ -257,6 +406,9 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { return nil, err } + id := model.Get("id").MustString("") + expression := model.Get("expression").MustString("") + dimensions, err := parseDimensions(model) if err != nil { return nil, err @@ -295,6 +447,7 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { alias = "{{metric}}_{{stat}}" } + returnData := model.Get("returnData").MustBool(false) 
highResolution := model.Get("highResolution").MustBool(false) return &CloudWatchQuery{ @@ -306,11 +459,18 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { ExtendedStatistics: aws.StringSlice(extendedStatistics), Period: period, Alias: alias, + Id: id, + Expression: expression, + ReturnData: returnData, HighResolution: highResolution, }, nil } func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]string) string { + if len(query.Id) > 0 && len(query.Expression) > 0 { + return query.Id + } + data := map[string]string{} data["region"] = query.Region data["namespace"] = query.Namespace @@ -338,6 +498,7 @@ func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]stri func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatchQuery) (*tsdb.QueryResult, error) { queryRes := tsdb.NewQueryResult() + queryRes.RefId = query.RefId var value float64 for _, s := range append(query.Statistics, query.ExtendedStatistics...) 
{ series := tsdb.TimeSeries{ diff --git a/pkg/tsdb/cloudwatch/types.go b/pkg/tsdb/cloudwatch/types.go index 0737b64686da..1225fb9b31ba 100644 --- a/pkg/tsdb/cloudwatch/types.go +++ b/pkg/tsdb/cloudwatch/types.go @@ -5,6 +5,7 @@ import ( ) type CloudWatchQuery struct { + RefId string Region string Namespace string MetricName string @@ -13,5 +14,8 @@ type CloudWatchQuery struct { ExtendedStatistics []*string Period int Alias string + Id string + Expression string + ReturnData bool HighResolution bool } diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts index 4101759ec1da..74100e5d69a8 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -30,7 +30,9 @@ export default class CloudWatchDatasource { var queries = _.filter(options.targets, item => { return ( - item.hide !== true && !!item.region && !!item.namespace && !!item.metricName && !_.isEmpty(item.statistics) + (item.id !== '' || item.hide !== true) && + ((!!item.region && !!item.namespace && !!item.metricName && !_.isEmpty(item.statistics)) || + item.expression.length > 0) ); }).map(item => { item.region = this.templateSrv.replace(this.getActualRegion(item.region), options.scopedVars); @@ -38,6 +40,9 @@ export default class CloudWatchDatasource { item.metricName = this.templateSrv.replace(item.metricName, options.scopedVars); item.dimensions = this.convertDimensionFormat(item.dimensions, options.scopedVars); item.period = String(this.getPeriod(item, options)); // use string format for period in graph query, and alerting + item.id = this.templateSrv.replace(item.id, options.scopedVars); + item.expression = this.templateSrv.replace(item.expression, options.scopedVars); + item.returnData = typeof item.hide === 'undefined' ? 
true : !item.hide; return _.extend( { @@ -384,11 +389,11 @@ export default class CloudWatchDatasource { var currentVariables = !_.isArray(variable.current.value) ? [variable.current] : variable.current.value.map(v => { - return { - text: v, - value: v, - }; - }); + return { + text: v, + value: v, + }; + }); let useSelectedVariables = selectedVariables.some(s => { return s.value === currentVariables[0].value; @@ -399,6 +404,9 @@ export default class CloudWatchDatasource { scopedVar[variable.name] = v; t.refId = target.refId + '_' + v.value; t.dimensions[dimensionKey] = templateSrv.replace(t.dimensions[dimensionKey], scopedVar); + if (target.id) { + t.id = target.id + window.btoa(v.value).replace(/=/g, '0'); // generate unique id + } return t; }); } diff --git a/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html b/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html index 81bad39e23ab..57a59f802659 100644 --- a/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html +++ b/public/app/plugins/datasource/cloudwatch/partials/query.parameter.html @@ -1,4 +1,4 @@ -
    +
    @@ -20,7 +20,7 @@
    -
    +
    @@ -31,18 +31,31 @@
    -
    +
    -
    +
    + + +
    +
    + +
    +
    + - +
    -
    - - - +
    + + + Alias replacement variables:
    • {{metric}}
    • @@ -54,12 +67,12 @@
    -
    - +
    +
    -
    -
    +
    +
    diff --git a/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts b/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts index 0b47ebd70690..689cf270febc 100644 --- a/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts +++ b/public/app/plugins/datasource/cloudwatch/query_parameter_ctrl.ts @@ -27,6 +27,9 @@ export class CloudWatchQueryParameterCtrl { target.dimensions = target.dimensions || {}; target.period = target.period || ''; target.region = target.region || 'default'; + target.id = target.id || ''; + target.expression = target.expression || ''; + target.returnData = target.returnData || false; target.highResolution = target.highResolution || false; $scope.regionSegment = uiSegmentSrv.getSegmentForValue($scope.target.region, 'select region'); From 4c59be4f5b0c4b3841a8190ec79139a581c3db84 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Fri, 22 Jun 2018 16:25:04 +0900 Subject: [PATCH 100/263] generate unique id when variable is multi --- .../plugins/datasource/cloudwatch/datasource.ts | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts index 74100e5d69a8..41e335dc320e 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -389,11 +389,11 @@ export default class CloudWatchDatasource { var currentVariables = !_.isArray(variable.current.value) ? 
[variable.current] : variable.current.value.map(v => { - return { - text: v, - value: v, - }; - }); + return { + text: v, + value: v, + }; + }); let useSelectedVariables = selectedVariables.some(s => { return s.value === currentVariables[0].value; @@ -404,8 +404,10 @@ export default class CloudWatchDatasource { scopedVar[variable.name] = v; t.refId = target.refId + '_' + v.value; t.dimensions[dimensionKey] = templateSrv.replace(t.dimensions[dimensionKey], scopedVar); - if (target.id) { + if (variable.multi && target.id) { t.id = target.id + window.btoa(v.value).replace(/=/g, '0'); // generate unique id + } else { + t.id = target.id; } return t; }); From 77220456b6c5478e3dc0addfc3838008fb1ea2a5 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Fri, 22 Jun 2018 16:35:17 +0900 Subject: [PATCH 101/263] improve error message --- pkg/tsdb/cloudwatch/cloudwatch.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 54634bc06141..38fbac3aa292 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -112,6 +112,10 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo continue } + if query.Id == "" && query.Expression != "" { + return nil, fmt.Errorf("Invalid query: id should be set if using expression") + } + eg.Go(func() error { queryRes, err := e.executeQuery(ectx, query, queryContext) if err != nil { From 677117fb034f94ef1d49bfe80207d258c062d974 Mon Sep 17 00:00:00 2001 From: Jesse Tane Date: Mon, 9 Jul 2018 00:58:34 -0400 Subject: [PATCH 102/263] fix diff and percent_diff (#12515) * make diff and percent_diff tests more realistic * fix diff and percent_diff * include @marefr's additional tests --- pkg/services/alerting/conditions/reducer.go | 8 +++---- .../alerting/conditions/reducer_test.go | 23 +++++++++++++++++-- 2 files changed, 25 insertions(+), 6 deletions(-) diff --git a/pkg/services/alerting/conditions/reducer.go 
b/pkg/services/alerting/conditions/reducer.go index 0a61c13fa120..1e8ae7927466 100644 --- a/pkg/services/alerting/conditions/reducer.go +++ b/pkg/services/alerting/conditions/reducer.go @@ -108,9 +108,9 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { break } } - // get other points + // get the oldest point points = points[0:i] - for i := len(points) - 1; i >= 0; i-- { + for i := 0; i < len(points); i++ { if points[i][0].Valid { allNull = false value = first - points[i][0].Float64 @@ -131,9 +131,9 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { break } } - // get other points + // get the oldest point points = points[0:i] - for i := len(points) - 1; i >= 0; i-- { + for i := 0; i < len(points); i++ { if points[i][0].Valid { allNull = false val := (first - points[i][0].Float64) / points[i][0].Float64 * 100 diff --git a/pkg/services/alerting/conditions/reducer_test.go b/pkg/services/alerting/conditions/reducer_test.go index 866b574f59f6..9d4e14626906 100644 --- a/pkg/services/alerting/conditions/reducer_test.go +++ b/pkg/services/alerting/conditions/reducer_test.go @@ -110,16 +110,35 @@ func TestSimpleReducer(t *testing.T) { So(reducer.Reduce(series).Float64, ShouldEqual, float64(3)) }) - Convey("diff", func() { + Convey("diff one point", func() { + result := testReducer("diff", 30) + So(result, ShouldEqual, float64(0)) + }) + + Convey("diff two points", func() { result := testReducer("diff", 30, 40) So(result, ShouldEqual, float64(10)) }) - Convey("percent_diff", func() { + Convey("diff three points", func() { + result := testReducer("diff", 30, 40, 40) + So(result, ShouldEqual, float64(10)) + }) + + Convey("percent_diff one point", func() { + result := testReducer("percent_diff", 40) + So(result, ShouldEqual, float64(0)) + }) + + Convey("percent_diff two points", func() { result := testReducer("percent_diff", 30, 40) So(result, ShouldEqual, float64(33.33333333333333)) }) + Convey("percent_diff three points", func() { + 
result := testReducer("percent_diff", 30, 40, 40) + So(result, ShouldEqual, float64(33.33333333333333)) + }) }) } From 5a925461f11d201d9571c2396f1087ba83c2c926 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 9 Jul 2018 07:00:11 +0200 Subject: [PATCH 103/263] Update CHANGELOG.md --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1f9cc951e344..0ebb038546e9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,7 @@ * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) +* **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) # 5.2.2 (unreleased) From 6670acd0825893d937006e81cb378cbeee6da9c2 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 9 Jul 2018 12:52:56 +0200 Subject: [PATCH 104/263] Remove irrelevant tests and templateSrv stub --- .../datasource/cloudwatch/datasource.ts | 1 + .../cloudwatch/specs/datasource.jest.ts | 110 ++---------------- 2 files changed, 10 insertions(+), 101 deletions(-) diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts index 4101759ec1da..391f65bd7aed 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ 
b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -404,6 +404,7 @@ export default class CloudWatchDatasource { } expandTemplateVariable(targets, scopedVars, templateSrv) { + // Datasource and template srv logic uber-complected. This should be cleaned up. return _.chain(targets) .map(target => { var dimensionKey = _.findKey(target.dimensions, v => { diff --git a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts index fbeaaad8e676..b06d00b3ee4e 100644 --- a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts @@ -1,14 +1,19 @@ import '../datasource'; -import { TemplateSrvStub } from 'test/specs/helpers'; import CloudWatchDatasource from '../datasource'; import 'app/features/dashboard/time_srv'; import * as dateMath from 'app/core/utils/datemath'; +import _ from 'lodash'; describe('CloudWatchDatasource', function() { let instanceSettings = { jsonData: { defaultRegion: 'us-east-1', access: 'proxy' }, }; - let templateSrv = new TemplateSrvStub(); + + let templateSrv = { + templateSettings: { interpolate: /\[\[([\s\S]+?)\]\]/g }, + replace: jest.fn(text => _.template(text, templateSrv.templateSettings)(templateSrv.data)), + variableExists: jest.fn(() => false), + }; let timeSrv = { time: { from: 'now-1h', to: 'now' }, @@ -68,8 +73,8 @@ describe('CloudWatchDatasource', function() { }, }; - beforeEach(async () => { - ctx.backendSrv.datasourceRequest = await jest.fn(params => { + beforeEach(() => { + ctx.backendSrv.datasourceRequest = jest.fn(params => { requestParams = params.data; return Promise.resolve({ data: response }); }); @@ -123,103 +128,6 @@ describe('CloudWatchDatasource', function() { done(); }); }); - - it('should generate the correct targets by expanding template variables', function() { - var templateSrv = { - variables: [ - { - name: 'instance_id', - options: [ - { text: 
'i-23456789', value: 'i-23456789', selected: false }, - { text: 'i-34567890', value: 'i-34567890', selected: true }, - ], - current: { - text: 'i-34567890', - value: 'i-34567890', - }, - }, - ], - replace: function(target, scopedVars) { - if (target === '$instance_id' && scopedVars['instance_id']['text'] === 'i-34567890') { - return 'i-34567890'; - } else { - return ''; - } - }, - getVariableName: function(e) { - return 'instance_id'; - }, - variableExists: function(e) { - return true; - }, - containsVariable: function(str, variableName) { - return str.indexOf('$' + variableName) !== -1; - }, - }; - - var targets = [ - { - region: 'us-east-1', - namespace: 'AWS/EC2', - metricName: 'CPUUtilization', - dimensions: { - InstanceId: '$instance_id', - }, - statistics: ['Average'], - period: 300, - }, - ]; - - var result = ctx.ds.expandTemplateVariable(targets, {}, templateSrv); - expect(result[0].dimensions.InstanceId).toBe('i-34567890'); - }); - - it('should generate the correct targets by expanding template variables from url', function() { - var templateSrv = { - variables: [ - { - name: 'instance_id', - options: [ - { text: 'i-23456789', value: 'i-23456789', selected: false }, - { text: 'i-34567890', value: 'i-34567890', selected: false }, - ], - current: 'i-45678901', - }, - ], - replace: function(target, scopedVars) { - if (target === '$instance_id') { - return 'i-45678901'; - } else { - return ''; - } - }, - getVariableName: function(e) { - return 'instance_id'; - }, - variableExists: function(e) { - return true; - }, - containsVariable: function(str, variableName) { - return str.indexOf('$' + variableName) !== -1; - }, - }; - - var targets = [ - { - region: 'us-east-1', - namespace: 'AWS/EC2', - metricName: 'CPUUtilization', - dimensions: { - InstanceId: '$instance_id', - }, - statistics: ['Average'], - period: 300, - }, - ]; - - var result = ctx.ds.expandTemplateVariable(targets, {}, templateSrv); - expect(result[0].dimensions.InstanceId).toBe('i-45678901'); - 
}); }); describe('When query region is "default"', function() { From e51f208a49b4997de43495d9b3c16e7d6d790c37 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 9 Jul 2018 15:42:34 +0200 Subject: [PATCH 105/263] Cleanup and remove some jest.fn() --- .../datasource/cloudwatch/specs/datasource.jest.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts index b06d00b3ee4e..2dc6e57b1aa2 100644 --- a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts @@ -1,6 +1,5 @@ import '../datasource'; import CloudWatchDatasource from '../datasource'; -import 'app/features/dashboard/time_srv'; import * as dateMath from 'app/core/utils/datemath'; import _ from 'lodash'; @@ -10,19 +9,20 @@ describe('CloudWatchDatasource', function() { }; let templateSrv = { + data: {}, templateSettings: { interpolate: /\[\[([\s\S]+?)\]\]/g }, - replace: jest.fn(text => _.template(text, templateSrv.templateSettings)(templateSrv.data)), - variableExists: jest.fn(() => false), + replace: text => _.template(text, templateSrv.templateSettings)(templateSrv.data), + variableExists: () => false, }; let timeSrv = { time: { from: 'now-1h', to: 'now' }, - timeRange: jest.fn(() => { + timeRange: () => { return { from: dateMath.parse(timeSrv.time.from, false), to: dateMath.parse(timeSrv.time.to, true), }; - }), + }, }; let backendSrv = {}; let ctx = { From 8b32dc584705c87a45326cd185b19810d5d120e0 Mon Sep 17 00:00:00 2001 From: yogy rahmawan Date: Tue, 10 Jul 2018 13:25:32 +0700 Subject: [PATCH 106/263] move go vet out of scripts and fixing warning (#12552) --- .circleci/config.yml | 3 +++ pkg/services/notifications/notifications.go | 2 -- pkg/services/rendering/phantomjs.go | 4 +++- pkg/tsdb/elasticsearch/client/client.go | 2 +- 
pkg/tsdb/elasticsearch/client/models.go | 2 +- scripts/circle-test-backend.sh | 3 --- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f351040fe2fb..f1d161c3cacd 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -88,6 +88,9 @@ jobs: - run: name: run linters command: 'gometalinter.v2 --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...' + - run: + name: run go vet + command: 'go vet ./pkg/...' test-frontend: docker: diff --git a/pkg/services/notifications/notifications.go b/pkg/services/notifications/notifications.go index fcefa91243db..769fdd06fd04 100644 --- a/pkg/services/notifications/notifications.go +++ b/pkg/services/notifications/notifications.go @@ -98,8 +98,6 @@ func (ns *NotificationService) Run(ctx context.Context) error { return ctx.Err() } } - - return nil } func (ns *NotificationService) SendWebhookSync(ctx context.Context, cmd *m.SendWebhookSync) error { diff --git a/pkg/services/rendering/phantomjs.go b/pkg/services/rendering/phantomjs.go index 8e06b5fed9d8..87ccaf6b5d27 100644 --- a/pkg/services/rendering/phantomjs.go +++ b/pkg/services/rendering/phantomjs.go @@ -58,7 +58,9 @@ func (rs *RenderingService) renderViaPhantomJS(ctx context.Context, opts Opts) ( cmdArgs = append([]string{fmt.Sprintf("--output-encoding=%s", opts.Encoding)}, cmdArgs...) } - commandCtx, _ := context.WithTimeout(ctx, opts.Timeout+time.Second*2) + commandCtx, cancel := context.WithTimeout(ctx, opts.Timeout+time.Second*2) + defer cancel() + cmd := exec.CommandContext(commandCtx, binPath, cmdArgs...) 
cmd.Stderr = cmd.Stdout diff --git a/pkg/tsdb/elasticsearch/client/client.go b/pkg/tsdb/elasticsearch/client/client.go index efc3ed1bda21..dff626a79ebe 100644 --- a/pkg/tsdb/elasticsearch/client/client.go +++ b/pkg/tsdb/elasticsearch/client/client.go @@ -218,7 +218,7 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch elapsed := time.Now().Sub(start) clientLog.Debug("Decoded multisearch json response", "took", elapsed) - msr.status = res.StatusCode + msr.Status = res.StatusCode return &msr, nil } diff --git a/pkg/tsdb/elasticsearch/client/models.go b/pkg/tsdb/elasticsearch/client/models.go index a5810a9b1091..a0d257d01a6e 100644 --- a/pkg/tsdb/elasticsearch/client/models.go +++ b/pkg/tsdb/elasticsearch/client/models.go @@ -74,7 +74,7 @@ type MultiSearchRequest struct { // MultiSearchResponse represents a multi search response type MultiSearchResponse struct { - status int `json:"status,omitempty"` + Status int `json:"status,omitempty"` Responses []*SearchResponse `json:"responses"` } diff --git a/scripts/circle-test-backend.sh b/scripts/circle-test-backend.sh index a63d6354fa66..4740ef99f1a9 100755 --- a/scripts/circle-test-backend.sh +++ b/scripts/circle-test-backend.sh @@ -13,9 +13,6 @@ function exit_if_fail { echo "running go fmt" exit_if_fail test -z "$(gofmt -s -l ./pkg | tee /dev/stderr)" -echo "running go vet" -exit_if_fail test -z "$(go vet ./pkg/... 
| tee /dev/stderr)" - echo "building backend with install to cache pkgs" exit_if_fail time go install ./pkg/cmd/grafana-server From 55780972109190b57b3b1fd48a899ec35466e92e Mon Sep 17 00:00:00 2001 From: Dominic Smith Date: Tue, 10 Jul 2018 19:17:55 +1000 Subject: [PATCH 107/263] Add new sequential color scales * Add new sequential color scales * Simplify inversion logic --- package.json | 2 +- public/app/plugins/panel/heatmap/color_scale.ts | 2 +- public/app/plugins/panel/heatmap/heatmap_ctrl.ts | 9 ++++++++- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index a43b2adc5be1..c26438230cc8 100644 --- a/package.json +++ b/package.json @@ -149,7 +149,7 @@ "classnames": "^2.2.5", "clipboard": "^1.7.1", "d3": "^4.11.0", - "d3-scale-chromatic": "^1.1.1", + "d3-scale-chromatic": "^1.3.0", "eventemitter3": "^2.0.3", "file-saver": "^1.3.3", "immutable": "^3.8.2", diff --git a/public/app/plugins/panel/heatmap/color_scale.ts b/public/app/plugins/panel/heatmap/color_scale.ts index 408286395470..3550c981db2a 100644 --- a/public/app/plugins/panel/heatmap/color_scale.ts +++ b/public/app/plugins/panel/heatmap/color_scale.ts @@ -3,7 +3,7 @@ import * as d3ScaleChromatic from 'd3-scale-chromatic'; export function getColorScale(colorScheme: any, lightTheme: boolean, maxValue: number, minValue = 0): (d: any) => any { let colorInterpolator = d3ScaleChromatic[colorScheme.value]; - let colorScaleInverted = colorScheme.invert === 'always' || (colorScheme.invert === 'dark' && !lightTheme); + let colorScaleInverted = colorScheme.invert === 'always' || colorScheme.invert === (lightTheme ? 'light' : 'dark'); let start = colorScaleInverted ? maxValue : minValue; let end = colorScaleInverted ? 
minValue : maxValue; diff --git a/public/app/plugins/panel/heatmap/heatmap_ctrl.ts b/public/app/plugins/panel/heatmap/heatmap_ctrl.ts index 31a5afa630e2..1d35ff2ea84e 100644 --- a/public/app/plugins/panel/heatmap/heatmap_ctrl.ts +++ b/public/app/plugins/panel/heatmap/heatmap_ctrl.ts @@ -76,6 +76,13 @@ let colorSchemes = [ { name: 'Reds', value: 'interpolateReds', invert: 'dark' }, // Sequential (Multi-Hue) + { name: 'Viridis', value: 'interpolateViridis', invert: 'light' }, + { name: 'Magma', value: 'interpolateMagma', invert: 'light' }, + { name: 'Inferno', value: 'interpolateInferno', invert: 'light' }, + { name: 'Plasma', value: 'interpolatePlasma', invert: 'light' }, + { name: 'Warm', value: 'interpolateWarm', invert: 'light' }, + { name: 'Cool', value: 'interpolateCool', invert: 'light' }, + { name: 'Cubehelix', value: 'interpolateCubehelixDefault', invert: 'light' }, { name: 'BuGn', value: 'interpolateBuGn', invert: 'dark' }, { name: 'BuPu', value: 'interpolateBuPu', invert: 'dark' }, { name: 'GnBu', value: 'interpolateGnBu', invert: 'dark' }, @@ -87,7 +94,7 @@ let colorSchemes = [ { name: 'YlGnBu', value: 'interpolateYlGnBu', invert: 'dark' }, { name: 'YlGn', value: 'interpolateYlGn', invert: 'dark' }, { name: 'YlOrBr', value: 'interpolateYlOrBr', invert: 'dark' }, - { name: 'YlOrRd', value: 'interpolateYlOrRd', invert: 'darm' }, + { name: 'YlOrRd', value: 'interpolateYlOrRd', invert: 'dark' }, ]; const ds_support_histogram_sort = ['prometheus', 'elasticsearch']; From 4ee4ca99be159862a8990034e0087417174dfd09 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 10 Jul 2018 12:54:45 +0200 Subject: [PATCH 108/263] Prevent scroll on focus for iframe --- public/app/core/components/scroll/page_scroll.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/app/core/components/scroll/page_scroll.ts b/public/app/core/components/scroll/page_scroll.ts index e6db344a4d6b..0cb36eba9144 100644 --- 
a/public/app/core/components/scroll/page_scroll.ts +++ b/public/app/core/components/scroll/page_scroll.ts @@ -29,11 +29,11 @@ export function pageScrollbar() { scope.$on('$routeChangeSuccess', () => { lastPos = 0; elem[0].scrollTop = 0; - elem[0].focus(); + elem[0].focus({ preventScroll: true }); }); elem[0].tabIndex = -1; - elem[0].focus(); + elem[0].focus({ preventScroll: true }); }, }; } From daf0c374b363d81d2ff36f44317a64279b31aa3b Mon Sep 17 00:00:00 2001 From: "Bryan T. Richardson" Date: Tue, 10 Jul 2018 10:11:39 -0600 Subject: [PATCH 109/263] Added BurstBalance metric to list of AWS RDS metrics. --- pkg/tsdb/cloudwatch/metric_find_query.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index 136ee241c2e5..e8e2c894120b 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -92,7 +92,7 @@ func init() { "AWS/NetworkELB": {"ActiveFlowCount", "ConsumedLCUs", "HealthyHostCount", "NewFlowCount", "ProcessedBytes", "TCP_Client_Reset_Count", "TCP_ELB_Reset_Count", "TCP_Target_Reset_Count", "UnHealthyHostCount"}, "AWS/OpsWorks": {"cpu_idle", "cpu_nice", "cpu_system", "cpu_user", "cpu_waitio", "load_1", "load_5", "load_15", "memory_buffers", "memory_cached", "memory_free", "memory_swap", "memory_total", "memory_used", "procs"}, "AWS/Redshift": {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "ReadIOPS", "ReadLatency", "ReadThroughput", "WriteIOPS", "WriteLatency", "WriteThroughput"}, - "AWS/RDS": {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", 
"DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", "FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", "ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"}, + "AWS/RDS": {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "BurstBalance", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", "DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", "FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", "ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"}, "AWS/Route53": {"ChildHealthCheckHealthyCount", "HealthCheckStatus", "HealthCheckPercentageHealthy", "ConnectionTime", "SSLHandshakeTime", "TimeToFirstByte"}, "AWS/S3": {"BucketSizeBytes", "NumberOfObjects", "AllRequests", "GetRequests", "PutRequests", "DeleteRequests", "HeadRequests", 
"PostRequests", "ListRequests", "BytesDownloaded", "BytesUploaded", "4xxErrors", "5xxErrors", "FirstByteLatency", "TotalRequestLatency"}, "AWS/SES": {"Bounce", "Complaint", "Delivery", "Reject", "Send"}, From 4d2dd22095ba4f0f96a2d524cb43a7d71e771e5f Mon Sep 17 00:00:00 2001 From: David Date: Wed, 11 Jul 2018 12:29:19 +0200 Subject: [PATCH 110/263] Fix css loading in plugins (#12573) - allow css loader to be imported again (wasnt prefixed by plugin) --- public/app/features/plugins/plugin_loader.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/features/plugins/plugin_loader.ts b/public/app/features/plugins/plugin_loader.ts index f999ee7e2ff3..20023e27b5c9 100644 --- a/public/app/features/plugins/plugin_loader.ts +++ b/public/app/features/plugins/plugin_loader.ts @@ -56,7 +56,7 @@ System.config({ css: 'vendor/plugin-css/css.js', }, meta: { - 'plugin*': { + '*': { esModule: true, authorization: true, loader: 'plugin-loader', From cfa555b5695d76bf7e4033190f216885a38a38ad Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 11 Jul 2018 12:31:07 +0200 Subject: [PATCH 111/263] Add folder name to dashboard title (#12545) * Add folder name to dashboard title. 
No display on medium displays and below * Compare folderId instead * folderId bigger than 0 --- public/app/features/dashboard/dashnav/dashnav.html | 2 +- public/sass/components/_navbar.scss | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/public/app/features/dashboard/dashnav/dashnav.html b/public/app/features/dashboard/dashnav/dashnav.html index 269d4b0bada2..6ec272b5ca46 100644 --- a/public/app/features/dashboard/dashnav/dashnav.html +++ b/public/app/features/dashboard/dashnav/dashnav.html @@ -3,7 +3,7 @@ diff --git a/public/sass/components/_navbar.scss b/public/sass/components/_navbar.scss index 1a453b150697..0b7e3a79dcda 100644 --- a/public/sass/components/_navbar.scss +++ b/public/sass/components/_navbar.scss @@ -85,6 +85,14 @@ // icon hidden on smaller screens display: none; } + + &--folder { + color: $text-color-weak; + + @include media-breakpoint-down(md) { + display: none; + } + } } .navbar-buttons { From 2fbf2bba4e61af9ad7ea0c870364138387bd4400 Mon Sep 17 00:00:00 2001 From: Rollulus Rouloul Date: Wed, 11 Jul 2018 12:33:19 +0200 Subject: [PATCH 112/263] fix: requests/sec instead of requets (#12557) --- public/app/core/utils/kbn.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index 4302e62e3e05..463025567cd1 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -957,7 +957,7 @@ kbn.getUnitFormats = function() { text: 'throughput', submenu: [ { text: 'ops/sec (ops)', value: 'ops' }, - { text: 'requets/sec (rps)', value: 'reqps' }, + { text: 'requests/sec (rps)', value: 'reqps' }, { text: 'reads/sec (rps)', value: 'rps' }, { text: 'writes/sec (wps)', value: 'wps' }, { text: 'I/O ops/sec (iops)', value: 'iops' }, From 2c22a7b4ba12966cac8e092167cb45c5fdadfeb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Wed, 11 Jul 2018 13:31:34 +0200 Subject: [PATCH 113/263] Don't build-all for PRs --- 
.circleci/config.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f1d161c3cacd..44f34d429266 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -246,7 +246,7 @@ workflows: test-and-build: jobs: - build-all: - filters: *filter-not-release + filters: *filter-only-master - build-enterprise: filters: *filter-only-master - codespell: @@ -270,9 +270,7 @@ workflows: - gometalinter - mysql-integration-test - postgres-integration-test - filters: - branches: - only: master + filters: *filter-only-master - deploy-enterprise-master: requires: - build-all From 24f395f986d413606675303988c2eb4d93694853 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Gryglicki?= Date: Wed, 11 Jul 2018 19:06:36 +0200 Subject: [PATCH 114/263] Add support for skipping variable value in URL, fixes #12174 (#12541) * New rebase Signed-off-by: Lukasz Gryglicki * Lint Signed-off-by: Lukasz Gryglicki --- .../app/features/templating/adhoc_variable.ts | 2 + .../features/templating/constant_variable.ts | 2 + .../features/templating/custom_variable.ts | 2 + .../templating/datasource_variable.ts | 2 + .../features/templating/interval_variable.ts | 2 + .../app/features/templating/query_variable.ts | 2 + .../templating/specs/template_srv.jest.ts | 57 +++++++++++++++++++ .../app/features/templating/template_srv.ts | 6 ++ 8 files changed, 75 insertions(+) diff --git a/public/app/features/templating/adhoc_variable.ts b/public/app/features/templating/adhoc_variable.ts index babeaf1f34e5..9f8bd4c39a74 100644 --- a/public/app/features/templating/adhoc_variable.ts +++ b/public/app/features/templating/adhoc_variable.ts @@ -3,6 +3,7 @@ import { Variable, assignModelProperties, variableTypes } from './variable'; export class AdhocVariable implements Variable { filters: any[]; + skipUrlSync: boolean; defaults = { type: 'adhoc', @@ -11,6 +12,7 @@ export class AdhocVariable implements Variable { hide: 0, datasource: null, filters: 
[], + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/constant_variable.ts b/public/app/features/templating/constant_variable.ts index f2fb4294537f..e727c6e98afd 100644 --- a/public/app/features/templating/constant_variable.ts +++ b/public/app/features/templating/constant_variable.ts @@ -4,6 +4,7 @@ export class ConstantVariable implements Variable { query: string; options: any[]; current: any; + skipUrlSync: boolean; defaults = { type: 'constant', @@ -13,6 +14,7 @@ export class ConstantVariable implements Variable { query: '', current: {}, options: [], + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/custom_variable.ts b/public/app/features/templating/custom_variable.ts index c15178f66443..4490a41a38f8 100644 --- a/public/app/features/templating/custom_variable.ts +++ b/public/app/features/templating/custom_variable.ts @@ -7,6 +7,7 @@ export class CustomVariable implements Variable { includeAll: boolean; multi: boolean; current: any; + skipUrlSync: boolean; defaults = { type: 'custom', @@ -19,6 +20,7 @@ export class CustomVariable implements Variable { includeAll: false, multi: false, allValue: null, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/datasource_variable.ts b/public/app/features/templating/datasource_variable.ts index 4c326a94e3b7..519ce21e4d40 100644 --- a/public/app/features/templating/datasource_variable.ts +++ b/public/app/features/templating/datasource_variable.ts @@ -7,6 +7,7 @@ export class DatasourceVariable implements Variable { options: any; current: any; refresh: any; + skipUrlSync: boolean; defaults = { type: 'datasource', @@ -18,6 +19,7 @@ export class DatasourceVariable implements Variable { options: [], query: '', refresh: 1, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/interval_variable.ts b/public/app/features/templating/interval_variable.ts index 3faac316f98e..b932819a7b7b 100644 
--- a/public/app/features/templating/interval_variable.ts +++ b/public/app/features/templating/interval_variable.ts @@ -11,6 +11,7 @@ export class IntervalVariable implements Variable { query: string; refresh: number; current: any; + skipUrlSync: boolean; defaults = { type: 'interval', @@ -24,6 +25,7 @@ export class IntervalVariable implements Variable { auto: false, auto_min: '10s', auto_count: 30, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/query_variable.ts b/public/app/features/templating/query_variable.ts index 54bd7bb660cd..5ddd6d328646 100644 --- a/public/app/features/templating/query_variable.ts +++ b/public/app/features/templating/query_variable.ts @@ -22,6 +22,7 @@ export class QueryVariable implements Variable { tagsQuery: string; tagValuesQuery: string; tags: any[]; + skipUrlSync: boolean; defaults = { type: 'query', @@ -42,6 +43,7 @@ export class QueryVariable implements Variable { useTags: false, tagsQuery: '', tagValuesQuery: '', + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/specs/template_srv.jest.ts b/public/app/features/templating/specs/template_srv.jest.ts index 59915776b4f6..86b6aa7ec99d 100644 --- a/public/app/features/templating/specs/template_srv.jest.ts +++ b/public/app/features/templating/specs/template_srv.jest.ts @@ -345,6 +345,49 @@ describe('templateSrv', function() { }); }); + describe('fillVariableValuesForUrl skip url sync', function() { + beforeEach(function() { + initTemplateSrv([ + { + name: 'test', + skipUrlSync: true, + current: { value: 'value' }, + getValueForUrl: function() { + return this.current.value; + }, + }, + ]); + }); + + it('should not include template variable value in url', function() { + var params = {}; + _templateSrv.fillVariableValuesForUrl(params); + expect(params['var-test']).toBe(undefined); + }); + }); + + describe('fillVariableValuesForUrl with multi value with skip url sync', function() { + beforeEach(function() { + 
initTemplateSrv([ + { + type: 'query', + name: 'test', + skipUrlSync: true, + current: { value: ['val1', 'val2'] }, + getValueForUrl: function() { + return this.current.value; + }, + }, + ]); + }); + + it('should not include template variable value in url', function() { + var params = {}; + _templateSrv.fillVariableValuesForUrl(params); + expect(params['var-test']).toBe(undefined); + }); + }); + describe('fillVariableValuesForUrl with multi value and scopedVars', function() { beforeEach(function() { initTemplateSrv([{ type: 'query', name: 'test', current: { value: ['val1', 'val2'] } }]); @@ -359,6 +402,20 @@ describe('templateSrv', function() { }); }); + describe('fillVariableValuesForUrl with multi value, scopedVars and skip url sync', function() { + beforeEach(function() { + initTemplateSrv([{ type: 'query', name: 'test', current: { value: ['val1', 'val2'] } }]); + }); + + it('should not set scoped value as url params', function() { + var params = {}; + _templateSrv.fillVariableValuesForUrl(params, { + test: { name: 'test', value: 'val1', skipUrlSync: true }, + }); + expect(params['var-test']).toBe(undefined); + }); + }); + describe('replaceWithText', function() { beforeEach(function() { initTemplateSrv([ diff --git a/public/app/features/templating/template_srv.ts b/public/app/features/templating/template_srv.ts index cdabe577f960..fc79d12ff9ee 100644 --- a/public/app/features/templating/template_srv.ts +++ b/public/app/features/templating/template_srv.ts @@ -250,8 +250,14 @@ export class TemplateSrv { fillVariableValuesForUrl(params, scopedVars) { _.each(this.variables, function(variable) { if (scopedVars && scopedVars[variable.name] !== void 0) { + if (scopedVars[variable.name].skipUrlSync) { + return; + } params['var-' + variable.name] = scopedVars[variable.name].value; } else { + if (variable.skipUrlSync) { + return; + } params['var-' + variable.name] = variable.getValueForUrl(); } }); From b4e0ace7a2f5d50d84745726349bd4a475aa7776 Mon Sep 17 00:00:00 2001 
From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Wed, 11 Jul 2018 10:58:06 -0700 Subject: [PATCH 115/263] fix: folder picker did not notify parent that the initial folder had been changed, fixes #12543 (#12554) --- .../dashboard/folder_picker/folder_picker.ts | 21 +++++++------------ 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/public/app/features/dashboard/folder_picker/folder_picker.ts b/public/app/features/dashboard/folder_picker/folder_picker.ts index 28338c29d330..352b29d27a04 100644 --- a/public/app/features/dashboard/folder_picker/folder_picker.ts +++ b/public/app/features/dashboard/folder_picker/folder_picker.ts @@ -104,10 +104,7 @@ export class FolderPickerCtrl { appEvents.emit('alert-success', ['Folder Created', 'OK']); this.closeCreateFolder(); - this.folder = { - text: result.title, - value: result.id, - }; + this.folder = { text: result.title, value: result.id }; this.onFolderChange(this.folder); }); } @@ -149,17 +146,14 @@ export class FolderPickerCtrl { folder = result.length > 0 ? 
result[0] : resetFolder; } } + this.folder = folder; - this.onFolderLoad(); - }); - } - private onFolderLoad() { - if (this.onLoad) { - this.onLoad({ - $folder: { id: this.folder.value, title: this.folder.text }, - }); - } + // if this is not the same as our initial value notify parent + if (this.folder.id !== this.initialFolderId) { + this.onChange({ $folder: { id: this.folder.value, title: this.folder.text } }); + } + }); } } @@ -176,7 +170,6 @@ export function folderPicker() { labelClass: '@', rootName: '@', onChange: '&', - onLoad: '&', onCreateFolder: '&', enterFolderCreation: '&', exitFolderCreation: '&', From 18a8290c65007bf86c074dc5f3f2b1bdb7c6c3d4 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Thu, 12 Jul 2018 03:13:47 +0900 Subject: [PATCH 116/263] (prometheus) prevent error to use $__interval_ms in query (#12533) * prevent error to use $__interval_ms in query * add test * prevent error to use $__interval_ms in query --- .../app/features/panel/metrics_panel_ctrl.ts | 2 +- .../datasource/prometheus/datasource.ts | 2 +- .../prometheus/specs/datasource_specs.ts | 36 +++++++++---------- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index 75c0de3bc6e6..6eb6d3b3b007 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -222,7 +222,7 @@ class MetricsPanelCtrl extends PanelCtrl { // and add built in variables interval and interval_ms var scopedVars = Object.assign({}, this.panel.scopedVars, { __interval: { text: this.interval, value: this.interval }, - __interval_ms: { text: this.intervalMs, value: this.intervalMs }, + __interval_ms: { text: String(this.intervalMs), value: String(this.intervalMs) }, }); var metricsQuery = { diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts index d7d33264c99c..88d6141696d9 
100644 --- a/public/app/plugins/datasource/prometheus/datasource.ts +++ b/public/app/plugins/datasource/prometheus/datasource.ts @@ -196,7 +196,7 @@ export class PrometheusDatasource { interval = adjustedInterval; scopedVars = Object.assign({}, options.scopedVars, { __interval: { text: interval + 's', value: interval + 's' }, - __interval_ms: { text: interval * 1000, value: interval * 1000 }, + __interval_ms: { text: String(interval * 1000), value: String(interval * 1000) }, }); } query.step = interval; diff --git a/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts b/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts index c5da671b7576..09aa934dd63e 100644 --- a/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts @@ -452,7 +452,7 @@ describe('PrometheusDatasource', function() { interval: '10s', scopedVars: { __interval: { text: '10s', value: '10s' }, - __interval_ms: { text: 10 * 1000, value: 10 * 1000 }, + __interval_ms: { text: String(10 * 1000), value: String(10 * 1000) }, }, }; var urlExpected = @@ -463,8 +463,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('10s'); expect(query.scopedVars.__interval.value).to.be('10s'); - expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(10 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(10 * 1000)); }); it('should be min interval when it is greater than auto interval', function() { var query = { @@ -479,7 +479,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, + __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, }, }; var urlExpected = @@ -490,8 +490,8 @@ 
describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); }); it('should account for intervalFactor', function() { var query = { @@ -507,7 +507,7 @@ describe('PrometheusDatasource', function() { interval: '10s', scopedVars: { __interval: { text: '10s', value: '10s' }, - __interval_ms: { text: 10 * 1000, value: 10 * 1000 }, + __interval_ms: { text: String(10 * 1000), value: String(10 * 1000) }, }, }; var urlExpected = @@ -518,8 +518,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('10s'); expect(query.scopedVars.__interval.value).to.be('10s'); - expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(10 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(10 * 1000)); }); it('should be interval * intervalFactor when greater than min interval', function() { var query = { @@ -535,7 +535,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, + __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, }, }; var urlExpected = @@ -546,8 +546,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 
1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); }); it('should be min interval when greater than interval * intervalFactor', function() { var query = { @@ -563,7 +563,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, + __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, }, }; var urlExpected = @@ -574,8 +574,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); }); it('should be determined by the 11000 data points limit, accounting for intervalFactor', function() { var query = { @@ -590,7 +590,7 @@ describe('PrometheusDatasource', function() { interval: '5s', scopedVars: { __interval: { text: '5s', value: '5s' }, - __interval_ms: { text: 5 * 1000, value: 5 * 1000 }, + __interval_ms: { text: String(5 * 1000), value: String(5 * 1000) }, }, }; var end = 7 * 24 * 60 * 60; @@ -609,8 +609,8 @@ describe('PrometheusDatasource', function() { expect(query.scopedVars.__interval.text).to.be('5s'); expect(query.scopedVars.__interval.value).to.be('5s'); - expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000); - expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000); + expect(query.scopedVars.__interval_ms.text).to.be(String(5 * 1000)); + expect(query.scopedVars.__interval_ms.value).to.be(String(5 * 1000)); }); }); }); From c03764ff8a47bcc9bcd007de2345baa53d80294e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Wed, 11 Jul 2018 11:23:07 -0700 Subject: [PATCH 117/263] Refactor team pages to react & design 
change (#12574) * Rewriting team pages in react * teams to react progress * teams: getting team by id returns same DTO as search, needed for AvatarUrl * teams: progress on new team pages * fix: team test * listing team members and removing team members now works * teams: team member page now works * ux: fixed adding team member issue * refactoring TeamPicker to conform to react coding styles better * teams: very close to being done with team page rewrite * minor style tweak * ux: polish to team pages * feature: team pages in react & everything working * fix: removed flickering when changing tabs by always rendering PageHeader --- pkg/api/alerting_test.go | 2 +- pkg/api/annotations_test.go | 2 +- pkg/api/dashboard_snapshot_test.go | 2 +- pkg/api/dashboard_test.go | 4 +- pkg/api/team.go | 1 + pkg/api/team_test.go | 2 +- pkg/models/team.go | 16 +- pkg/services/guardian/guardian.go | 10 +- pkg/services/guardian/guardian_util_test.go | 6 +- pkg/services/sqlstore/team.go | 44 +++-- .../ManageDashboards/FolderPermissions.tsx | 4 +- public/app/containers/Teams/TeamGroupSync.tsx | 149 +++++++++++++++++ public/app/containers/Teams/TeamList.tsx | 125 ++++++++++++++ public/app/containers/Teams/TeamMembers.tsx | 144 ++++++++++++++++ public/app/containers/Teams/TeamPages.tsx | 77 +++++++++ public/app/containers/Teams/TeamSettings.tsx | 69 ++++++++ public/app/core/angular_wrappers.ts | 2 - public/app/core/components/Forms/Forms.tsx | 21 +++ .../Permissions/AddPermissions.jest.tsx | 48 +++--- .../components/Permissions/AddPermissions.tsx | 61 +++---- .../Permissions/DashboardPermissions.tsx | 7 +- .../DisabledPermissionsListItem.tsx | 2 +- .../Permissions/PermissionsListItem.tsx | 2 +- .../components/Picker/DescriptionPicker.tsx | 10 +- .../components/Picker/TeamPicker.jest.tsx | 24 +-- .../app/core/components/Picker/TeamPicker.tsx | 38 ++--- .../components/Picker/UserPicker.jest.tsx | 21 +-- .../app/core/components/Picker/UserPicker.tsx | 57 ++++--- 
.../app/core/components/Picker/withPicker.tsx | 34 ---- public/app/core/components/grafana_app.ts | 4 +- public/app/core/components/team_picker.ts | 64 ------- public/app/core/components/user_picker.ts | 71 -------- public/app/core/core.ts | 4 - public/app/core/services/backend_srv.ts | 14 ++ public/app/features/org/all.ts | 2 - .../features/org/partials/team_details.html | 105 ------------ public/app/features/org/partials/teams.html | 68 -------- .../org/specs/team_details_ctrl.jest.ts | 42 ----- public/app/features/org/team_details_ctrl.ts | 108 ------------ public/app/features/org/teams_ctrl.ts | 66 -------- public/app/routes/routes.ts | 20 ++- public/app/stores/NavStore/NavItem.ts | 3 +- public/app/stores/NavStore/NavStore.ts | 40 +++++ public/app/stores/RootStore/RootStore.ts | 4 + public/app/stores/TeamsStore/TeamsStore.ts | 156 ++++++++++++++++++ public/sass/components/_gf-form.scss | 4 +- public/test/jest-shim.ts | 13 +- 47 files changed, 1015 insertions(+), 757 deletions(-) create mode 100644 public/app/containers/Teams/TeamGroupSync.tsx create mode 100644 public/app/containers/Teams/TeamList.tsx create mode 100644 public/app/containers/Teams/TeamMembers.tsx create mode 100644 public/app/containers/Teams/TeamPages.tsx create mode 100644 public/app/containers/Teams/TeamSettings.tsx create mode 100644 public/app/core/components/Forms/Forms.tsx delete mode 100644 public/app/core/components/Picker/withPicker.tsx delete mode 100644 public/app/core/components/team_picker.ts delete mode 100644 public/app/core/components/user_picker.ts delete mode 100644 public/app/features/org/partials/team_details.html delete mode 100755 public/app/features/org/partials/teams.html delete mode 100644 public/app/features/org/specs/team_details_ctrl.jest.ts delete mode 100644 public/app/features/org/team_details_ctrl.ts delete mode 100644 public/app/features/org/teams_ctrl.ts create mode 100644 public/app/stores/TeamsStore/TeamsStore.ts diff --git a/pkg/api/alerting_test.go 
b/pkg/api/alerting_test.go index 9eba0e0d5b64..331beeef5e45 100644 --- a/pkg/api/alerting_test.go +++ b/pkg/api/alerting_test.go @@ -31,7 +31,7 @@ func TestAlertingApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) diff --git a/pkg/api/annotations_test.go b/pkg/api/annotations_test.go index 6590eb19ff28..08f3018c6941 100644 --- a/pkg/api/annotations_test.go +++ b/pkg/api/annotations_test.go @@ -119,7 +119,7 @@ func TestAnnotationsApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) diff --git a/pkg/api/dashboard_snapshot_test.go b/pkg/api/dashboard_snapshot_test.go index 5e7637a24e13..e58f2c4712d1 100644 --- a/pkg/api/dashboard_snapshot_test.go +++ b/pkg/api/dashboard_snapshot_test.go @@ -39,7 +39,7 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) { return nil }) - teamResp := []*m.Team{} + teamResp := []*m.TeamDTO{} bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { query.Result = teamResp return nil diff --git a/pkg/api/dashboard_test.go b/pkg/api/dashboard_test.go index 50a2e314f5ca..283a9b5f12ce 100644 --- a/pkg/api/dashboard_test.go +++ b/pkg/api/dashboard_test.go @@ -61,7 +61,7 @@ func TestDashboardApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) @@ -230,7 +230,7 @@ func TestDashboardApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) diff --git a/pkg/api/team.go b/pkg/api/team.go index 9919305881bc..ebb426c4c820 100644 --- a/pkg/api/team.go +++ b/pkg/api/team.go @@ -93,5 +93,6 @@ func GetTeamByID(c *m.ReqContext) Response { return Error(500, "Failed to get Team", 
err) } + query.Result.AvatarUrl = dtos.GetGravatarUrlWithDefault(query.Result.Email, query.Result.Name) return JSON(200, &query.Result) } diff --git a/pkg/api/team_test.go b/pkg/api/team_test.go index 0bf06d723c8c..a19842888707 100644 --- a/pkg/api/team_test.go +++ b/pkg/api/team_test.go @@ -13,7 +13,7 @@ import ( func TestTeamApiEndpoint(t *testing.T) { Convey("Given two teams", t, func() { mockResult := models.SearchTeamQueryResult{ - Teams: []*models.SearchTeamDto{ + Teams: []*models.TeamDTO{ {Name: "team1"}, {Name: "team2"}, }, diff --git a/pkg/models/team.go b/pkg/models/team.go index 9c679a133941..61285db3a5fc 100644 --- a/pkg/models/team.go +++ b/pkg/models/team.go @@ -49,13 +49,13 @@ type DeleteTeamCommand struct { type GetTeamByIdQuery struct { OrgId int64 Id int64 - Result *Team + Result *TeamDTO } type GetTeamsByUserQuery struct { OrgId int64 - UserId int64 `json:"userId"` - Result []*Team `json:"teams"` + UserId int64 `json:"userId"` + Result []*TeamDTO `json:"teams"` } type SearchTeamsQuery struct { @@ -68,7 +68,7 @@ type SearchTeamsQuery struct { Result SearchTeamQueryResult } -type SearchTeamDto struct { +type TeamDTO struct { Id int64 `json:"id"` OrgId int64 `json:"orgId"` Name string `json:"name"` @@ -78,8 +78,8 @@ type SearchTeamDto struct { } type SearchTeamQueryResult struct { - TotalCount int64 `json:"totalCount"` - Teams []*SearchTeamDto `json:"teams"` - Page int `json:"page"` - PerPage int `json:"perPage"` + TotalCount int64 `json:"totalCount"` + Teams []*TeamDTO `json:"teams"` + Page int `json:"page"` + PerPage int `json:"perPage"` } diff --git a/pkg/services/guardian/guardian.go b/pkg/services/guardian/guardian.go index cfd8f5c3a6e1..7506338c5f07 100644 --- a/pkg/services/guardian/guardian.go +++ b/pkg/services/guardian/guardian.go @@ -30,7 +30,7 @@ type dashboardGuardianImpl struct { dashId int64 orgId int64 acl []*m.DashboardAclInfoDTO - groups []*m.Team + teams []*m.TeamDTO log log.Logger } @@ -186,15 +186,15 @@ func (g 
*dashboardGuardianImpl) GetAcl() ([]*m.DashboardAclInfoDTO, error) { return g.acl, nil } -func (g *dashboardGuardianImpl) getTeams() ([]*m.Team, error) { - if g.groups != nil { - return g.groups, nil +func (g *dashboardGuardianImpl) getTeams() ([]*m.TeamDTO, error) { + if g.teams != nil { + return g.teams, nil } query := m.GetTeamsByUserQuery{OrgId: g.orgId, UserId: g.user.UserId} err := bus.Dispatch(&query) - g.groups = query.Result + g.teams = query.Result return query.Result, err } diff --git a/pkg/services/guardian/guardian_util_test.go b/pkg/services/guardian/guardian_util_test.go index 3d839e71b740..d85548ecb8c0 100644 --- a/pkg/services/guardian/guardian_util_test.go +++ b/pkg/services/guardian/guardian_util_test.go @@ -19,7 +19,7 @@ type scenarioContext struct { givenUser *m.SignedInUser givenDashboardID int64 givenPermissions []*m.DashboardAclInfoDTO - givenTeams []*m.Team + givenTeams []*m.TeamDTO updatePermissions []*m.DashboardAcl expectedFlags permissionFlags callerFile string @@ -84,11 +84,11 @@ func permissionScenario(desc string, dashboardID int64, sc *scenarioContext, per return nil }) - teams := []*m.Team{} + teams := []*m.TeamDTO{} for _, p := range permissions { if p.TeamId > 0 { - teams = append(teams, &m.Team{Id: p.TeamId}) + teams = append(teams, &m.TeamDTO{Id: p.TeamId}) } } diff --git a/pkg/services/sqlstore/team.go b/pkg/services/sqlstore/team.go index 9378ca37f607..72955df9a6ad 100644 --- a/pkg/services/sqlstore/team.go +++ b/pkg/services/sqlstore/team.go @@ -22,6 +22,16 @@ func init() { bus.AddHandler("sql", GetTeamMembers) } +func getTeamSelectSqlBase() string { + return `SELECT + team.id as id, + team.org_id, + team.name as name, + team.email as email, + (SELECT COUNT(*) from team_member where team_member.team_id = team.id) as member_count + FROM team as team ` +} + func CreateTeam(cmd *m.CreateTeamCommand) error { return inTransaction(func(sess *DBSession) error { @@ -130,21 +140,15 @@ func isTeamNameTaken(orgId int64, name string, 
existingId int64, sess *DBSession func SearchTeams(query *m.SearchTeamsQuery) error { query.Result = m.SearchTeamQueryResult{ - Teams: make([]*m.SearchTeamDto, 0), + Teams: make([]*m.TeamDTO, 0), } queryWithWildcards := "%" + query.Query + "%" var sql bytes.Buffer params := make([]interface{}, 0) - sql.WriteString(`select - team.id as id, - team.org_id, - team.name as name, - team.email as email, - (select count(*) from team_member where team_member.team_id = team.id) as member_count - from team as team - where team.org_id = ?`) + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` WHERE team.org_id = ?`) params = append(params, query.OrgId) @@ -186,8 +190,14 @@ func SearchTeams(query *m.SearchTeamsQuery) error { } func GetTeamById(query *m.GetTeamByIdQuery) error { - var team m.Team - exists, err := x.Where("org_id=? and id=?", query.OrgId, query.Id).Get(&team) + var sql bytes.Buffer + + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` WHERE team.org_id = ? and team.id = ?`) + + var team m.TeamDTO + exists, err := x.Sql(sql.String(), query.OrgId, query.Id).Get(&team) + if err != nil { return err } @@ -202,13 +212,15 @@ func GetTeamById(query *m.GetTeamByIdQuery) error { // GetTeamsByUser is used by the Guardian when checking a users' permissions func GetTeamsByUser(query *m.GetTeamsByUserQuery) error { - query.Result = make([]*m.Team, 0) + query.Result = make([]*m.TeamDTO, 0) + + var sql bytes.Buffer - sess := x.Table("team") - sess.Join("INNER", "team_member", "team.id=team_member.team_id") - sess.Where("team.org_id=? and team_member.user_id=?", query.OrgId, query.UserId) + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` INNER JOIN team_member on team.id = team_member.team_id`) + sql.WriteString(` WHERE team.org_id = ? 
and team_member.user_id = ?`) - err := sess.Find(&query.Result) + err := x.Sql(sql.String(), query.OrgId, query.UserId).Find(&query.Result) return err } diff --git a/public/app/containers/ManageDashboards/FolderPermissions.tsx b/public/app/containers/ManageDashboards/FolderPermissions.tsx index abbde63a179d..aac5d32750a3 100644 --- a/public/app/containers/ManageDashboards/FolderPermissions.tsx +++ b/public/app/containers/ManageDashboards/FolderPermissions.tsx @@ -54,7 +54,7 @@ export class FolderPermissions extends Component {
    -

    Folder Permissions

    +

    Folder Permissions

    @@ -68,7 +68,7 @@ export class FolderPermissions extends Component {
    - +
    diff --git a/public/app/containers/Teams/TeamGroupSync.tsx b/public/app/containers/Teams/TeamGroupSync.tsx new file mode 100644 index 000000000000..323dceae0d87 --- /dev/null +++ b/public/app/containers/Teams/TeamGroupSync.tsx @@ -0,0 +1,149 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam, ITeamGroup } from 'app/stores/TeamsStore/TeamsStore'; +import SlideDown from 'app/core/components/Animations/SlideDown'; +import Tooltip from 'app/core/components/Tooltip/Tooltip'; + +interface Props { + team: ITeam; +} + +interface State { + isAdding: boolean; + newGroupId?: string; +} + +const headerTooltip = `Sync LDAP or OAuth groups with your Grafana teams.`; + +@observer +export class TeamGroupSync extends React.Component { + constructor(props) { + super(props); + this.state = { isAdding: false, newGroupId: '' }; + } + + componentDidMount() { + this.props.team.loadGroups(); + } + + renderGroup(group: ITeamGroup) { + return ( + + {group.groupId} + + this.onRemoveGroup(group)}> + + + + + ); + } + + onToggleAdding = () => { + this.setState({ isAdding: !this.state.isAdding }); + }; + + onNewGroupIdChanged = evt => { + this.setState({ newGroupId: evt.target.value }); + }; + + onAddGroup = () => { + this.props.team.addGroup(this.state.newGroupId); + this.setState({ isAdding: false, newGroupId: '' }); + }; + + onRemoveGroup = (group: ITeamGroup) => { + this.props.team.removeGroup(group.groupId); + }; + + isNewGroupValid() { + return this.state.newGroupId.length > 1; + } + + render() { + const { isAdding, newGroupId } = this.state; + const groups = this.props.team.groups.values(); + + return ( +
    +
    +

    External group sync

    + + + +
    + {groups.length > 0 && ( + + )} +
    + + +
    + +
    Add External Group
    +
    +
    + +
    + +
    + +
    +
    +
    +
    + + {groups.length === 0 && + !isAdding && ( +
    +
    There are no external groups to sync with
    + +
    + {headerTooltip} + + Learn more + +
    +
    + )} + + {groups.length > 0 && ( +
    + + + + + + + {groups.map(group => this.renderGroup(group))} +
    External Group ID +
    +
    + )} +
    + ); + } +} + +export default hot(module)(TeamGroupSync); diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx new file mode 100644 index 000000000000..4429764b1cc9 --- /dev/null +++ b/public/app/containers/Teams/TeamList.tsx @@ -0,0 +1,125 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { inject, observer } from 'mobx-react'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import { NavStore } from 'app/stores/NavStore/NavStore'; +import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { BackendSrv } from 'app/core/services/backend_srv'; +import appEvents from 'app/core/app_events'; + +interface Props { + nav: typeof NavStore.Type; + teams: typeof TeamsStore.Type; + backendSrv: BackendSrv; +} + +@inject('nav', 'teams') +@observer +export class TeamList extends React.Component { + constructor(props) { + super(props); + + this.props.nav.load('cfg', 'teams'); + this.fetchTeams(); + } + + fetchTeams() { + this.props.teams.loadTeams(); + } + + deleteTeam(team: ITeam) { + appEvents.emit('confirm-modal', { + title: 'Delete', + text: 'Are you sure you want to delete Team ' + team.name + '?', + yesText: 'Delete', + icon: 'fa-warning', + onConfirm: () => { + this.deleteTeamConfirmed(team); + }, + }); + } + + deleteTeamConfirmed(team) { + this.props.backendSrv.delete('/api/teams/' + team.id).then(this.fetchTeams.bind(this)); + } + + onSearchQueryChange = evt => { + this.props.teams.setSearchQuery(evt.target.value); + }; + + renderTeamMember(team: ITeam): JSX.Element { + let teamUrl = `org/teams/edit/${team.id}`; + + return ( + + + + + + + + {team.name} + + + {team.email} + + + {team.memberCount} + + + this.deleteTeam(team)} className="btn btn-danger btn-small"> + + + + + ); + } + + render() { + const { nav, teams } = this.props; + return ( +
    + +
    +
    +
    + +
    + + + +
    + + + + + + + + + {teams.filteredTeams.map(team => this.renderTeamMember(team))} +
    + NameEmailMembers +
    +
    +
    +
    + ); + } +} + +export default hot(module)(TeamList); diff --git a/public/app/containers/Teams/TeamMembers.tsx b/public/app/containers/Teams/TeamMembers.tsx new file mode 100644 index 000000000000..0d0762469a04 --- /dev/null +++ b/public/app/containers/Teams/TeamMembers.tsx @@ -0,0 +1,144 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam, ITeamMember } from 'app/stores/TeamsStore/TeamsStore'; +import appEvents from 'app/core/app_events'; +import SlideDown from 'app/core/components/Animations/SlideDown'; +import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; + +interface Props { + team: ITeam; +} + +interface State { + isAdding: boolean; + newTeamMember?: User; +} + +@observer +export class TeamMembers extends React.Component { + constructor(props) { + super(props); + this.state = { isAdding: false, newTeamMember: null }; + } + + componentDidMount() { + this.props.team.loadMembers(); + } + + onSearchQueryChange = evt => { + this.props.team.setSearchQuery(evt.target.value); + }; + + removeMember(member: ITeamMember) { + appEvents.emit('confirm-modal', { + title: 'Remove Member', + text: 'Are you sure you want to remove ' + member.login + ' from this group?', + yesText: 'Remove', + icon: 'fa-warning', + onConfirm: () => { + this.removeMemberConfirmed(member); + }, + }); + } + + removeMemberConfirmed(member: ITeamMember) { + this.props.team.removeMember(member); + } + + renderMember(member: ITeamMember) { + return ( + + + + + {member.login} + {member.email} + + this.removeMember(member)} className="btn btn-danger btn-mini"> + + + + + ); + } + + onToggleAdding = () => { + this.setState({ isAdding: !this.state.isAdding }); + }; + + onUserSelected = (user: User) => { + this.setState({ newTeamMember: user }); + }; + + onAddUserToTeam = async () => { + await this.props.team.addMember(this.state.newTeamMember.id); + await this.props.team.loadMembers(); + this.setState({ 
newTeamMember: null }); + }; + + render() { + const { newTeamMember, isAdding } = this.state; + const members = this.props.team.members.values(); + const newTeamMemberValue = newTeamMember && newTeamMember.id.toString(); + + return ( +
    +
    +
    + +
    + +
    + + +
    + + +
    + +
    Add Team Member
    +
    + + + {this.state.newTeamMember && ( + + )} +
    +
    +
    + +
    + + + + + + + + {members.map(member => this.renderMember(member))} +
    + NameEmail +
    +
    +
    + ); + } +} + +export default hot(module)(TeamMembers); diff --git a/public/app/containers/Teams/TeamPages.tsx b/public/app/containers/Teams/TeamPages.tsx new file mode 100644 index 000000000000..500a7cbe5e81 --- /dev/null +++ b/public/app/containers/Teams/TeamPages.tsx @@ -0,0 +1,77 @@ +import React from 'react'; +import _ from 'lodash'; +import { hot } from 'react-hot-loader'; +import { inject, observer } from 'mobx-react'; +import config from 'app/core/config'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import { NavStore } from 'app/stores/NavStore/NavStore'; +import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { ViewStore } from 'app/stores/ViewStore/ViewStore'; +import TeamMembers from './TeamMembers'; +import TeamSettings from './TeamSettings'; +import TeamGroupSync from './TeamGroupSync'; + +interface Props { + nav: typeof NavStore.Type; + teams: typeof TeamsStore.Type; + view: typeof ViewStore.Type; +} + +@inject('nav', 'teams', 'view') +@observer +export class TeamPages extends React.Component { + isSyncEnabled: boolean; + currentPage: string; + + constructor(props) { + super(props); + + this.isSyncEnabled = config.buildInfo.isEnterprise; + this.currentPage = this.getCurrentPage(); + + this.loadTeam(); + } + + async loadTeam() { + const { teams, nav, view } = this.props; + + await teams.loadById(view.routeParams.get('id')); + + nav.initTeamPage(this.getCurrentTeam(), this.currentPage, this.isSyncEnabled); + } + + getCurrentTeam(): ITeam { + const { teams, view } = this.props; + return teams.map.get(view.routeParams.get('id')); + } + + getCurrentPage() { + const pages = ['members', 'settings', 'groupsync']; + const currentPage = this.props.view.routeParams.get('page'); + return _.includes(pages, currentPage) ? currentPage : pages[0]; + } + + render() { + const { nav } = this.props; + const currentTeam = this.getCurrentTeam(); + + if (!nav.main) { + return null; + } + + return ( +
    + + {currentTeam && ( +
    + {this.currentPage === 'members' && } + {this.currentPage === 'settings' && } + {this.currentPage === 'groupsync' && this.isSyncEnabled && } +
    + )} +
    + ); + } +} + +export default hot(module)(TeamPages); diff --git a/public/app/containers/Teams/TeamSettings.tsx b/public/app/containers/Teams/TeamSettings.tsx new file mode 100644 index 000000000000..142088a5d1ee --- /dev/null +++ b/public/app/containers/Teams/TeamSettings.tsx @@ -0,0 +1,69 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { Label } from 'app/core/components/Forms/Forms'; + +interface Props { + team: ITeam; +} + +@observer +export class TeamSettings extends React.Component { + constructor(props) { + super(props); + } + + onChangeName = evt => { + this.props.team.setName(evt.target.value); + }; + + onChangeEmail = evt => { + this.props.team.setEmail(evt.target.value); + }; + + onUpdate = evt => { + evt.preventDefault(); + this.props.team.update(); + }; + + render() { + return ( +
    +

    Team Settings

    +
    +
    + + +
    +
    + + +
    + +
    + +
    +
    +
    + ); + } +} + +export default hot(module)(TeamSettings); diff --git a/public/app/core/angular_wrappers.ts b/public/app/core/angular_wrappers.ts index ace0eb00b07d..a4439509f8e0 100644 --- a/public/app/core/angular_wrappers.ts +++ b/public/app/core/angular_wrappers.ts @@ -5,7 +5,6 @@ import EmptyListCTA from './components/EmptyListCTA/EmptyListCTA'; import LoginBackground from './components/Login/LoginBackground'; import { SearchResult } from './components/search/SearchResult'; import { TagFilter } from './components/TagFilter/TagFilter'; -import UserPicker from './components/Picker/UserPicker'; import DashboardPermissions from './components/Permissions/DashboardPermissions'; export function registerAngularDirectives() { @@ -19,6 +18,5 @@ export function registerAngularDirectives() { ['onSelect', { watchDepth: 'reference' }], ['tagOptions', { watchDepth: 'reference' }], ]); - react2AngularDirective('selectUserPicker', UserPicker, ['backendSrv', 'handlePicked']); react2AngularDirective('dashboardPermissions', DashboardPermissions, ['backendSrv', 'dashboardId', 'folder']); } diff --git a/public/app/core/components/Forms/Forms.tsx b/public/app/core/components/Forms/Forms.tsx new file mode 100644 index 000000000000..4b74d48ba08b --- /dev/null +++ b/public/app/core/components/Forms/Forms.tsx @@ -0,0 +1,21 @@ +import React, { SFC, ReactNode } from 'react'; +import Tooltip from '../Tooltip/Tooltip'; + +interface Props { + tooltip?: string; + for?: string; + children: ReactNode; +} + +export const Label: SFC = props => { + return ( + + {props.children} + {props.tooltip && ( + + + + )} + + ); +}; diff --git a/public/app/core/components/Permissions/AddPermissions.jest.tsx b/public/app/core/components/Permissions/AddPermissions.jest.tsx index fe97c4c7e62f..513a22ddea41 100644 --- a/public/app/core/components/Permissions/AddPermissions.jest.tsx +++ b/public/app/core/components/Permissions/AddPermissions.jest.tsx @@ -1,32 +1,32 @@ -import React from 'react'; +import React 
from 'react'; +import { shallow } from 'enzyme'; import AddPermissions from './AddPermissions'; import { RootStore } from 'app/stores/RootStore/RootStore'; -import { backendSrv } from 'test/mocks/common'; -import { shallow } from 'enzyme'; +import { getBackendSrv } from 'app/core/services/backend_srv'; + +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([ + { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' }, + { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' }, + ]); + }, + post: jest.fn(() => Promise.resolve({})), + }; + }, +})); describe('AddPermissions', () => { let wrapper; let store; let instance; + let backendSrv: any = getBackendSrv(); beforeAll(() => { - backendSrv.get.mockReturnValue( - Promise.resolve([ - { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' }, - { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' }, - ]) - ); - - backendSrv.post = jest.fn(() => Promise.resolve({})); - - store = RootStore.create( - {}, - { - backendSrv: backendSrv, - } - ); - - wrapper = shallow(); + store = RootStore.create({}, { backendSrv: backendSrv }); + wrapper = shallow(); instance = wrapper.instance(); return store.permissions.load(1, true, false); }); @@ -43,8 +43,8 @@ describe('AddPermissions', () => { login: 'user2', }; - instance.typeChanged(evt); - instance.userPicked(userItem); + instance.onTypeChanged(evt); + instance.onUserSelected(userItem); wrapper.update(); @@ -70,8 +70,8 @@ describe('AddPermissions', () => { name: 'ug1', }; - instance.typeChanged(evt); - instance.teamPicked(teamItem); + instance.onTypeChanged(evt); + instance.onTeamSelected(teamItem); wrapper.update(); diff --git a/public/app/core/components/Permissions/AddPermissions.tsx b/public/app/core/components/Permissions/AddPermissions.tsx index 4dcd07ffb48a..289e27aa731b 100644 --- 
a/public/app/core/components/Permissions/AddPermissions.tsx +++ b/public/app/core/components/Permissions/AddPermissions.tsx @@ -1,24 +1,19 @@ -import React, { Component } from 'react'; +import React, { Component } from 'react'; import { observer } from 'mobx-react'; import { aclTypes } from 'app/stores/PermissionsStore/PermissionsStore'; -import UserPicker, { User } from 'app/core/components/Picker/UserPicker'; -import TeamPicker, { Team } from 'app/core/components/Picker/TeamPicker'; +import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; +import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker'; import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker'; import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; -export interface IProps { +export interface Props { permissions: any; - backendSrv: any; } + @observer -class AddPermissions extends Component { +class AddPermissions extends Component { constructor(props) { super(props); - this.userPicked = this.userPicked.bind(this); - this.teamPicked = this.teamPicked.bind(this); - this.permissionPicked = this.permissionPicked.bind(this); - this.typeChanged = this.typeChanged.bind(this); - this.handleSubmit = this.handleSubmit.bind(this); } componentWillMount() { @@ -26,49 +21,49 @@ class AddPermissions extends Component { permissions.resetNewType(); } - typeChanged(evt) { + onTypeChanged = evt => { const { value } = evt.target; const { permissions } = this.props; permissions.setNewType(value); - } + }; - userPicked(user: User) { + onUserSelected = (user: User) => { const { permissions } = this.props; if (!user) { permissions.newItem.setUser(null, null); return; } return permissions.newItem.setUser(user.id, user.login, user.avatarUrl); - } + }; - teamPicked(team: Team) { + onTeamSelected = (team: Team) => { const { permissions } = this.props; if (!team) { permissions.newItem.setTeam(null, null); return; } return 
permissions.newItem.setTeam(team.id, team.name, team.avatarUrl); - } + }; - permissionPicked(permission: OptionWithDescription) { + onPermissionChanged = (permission: OptionWithDescription) => { const { permissions } = this.props; return permissions.newItem.setPermission(permission.value); - } + }; resetNewType() { const { permissions } = this.props; return permissions.resetNewType(); } - handleSubmit(evt) { + onSubmit = evt => { evt.preventDefault(); const { permissions } = this.props; permissions.addStoreItem(); - } + }; render() { - const { permissions, backendSrv } = this.props; + const { permissions } = this.props; const newItem = permissions.newItem; const pickerClassName = 'width-20'; @@ -79,12 +74,12 @@ class AddPermissions extends Component { -
    -
    Add Permission For
    + +
    Add Permission For
    - {aclTypes.map((option, idx) => { return (
    - + {
    {}} + onSelected={() => {}} value={item.permission} disabled={true} className={'gf-form-input--form-dropdown-right'} diff --git a/public/app/core/components/Permissions/PermissionsListItem.tsx b/public/app/core/components/Permissions/PermissionsListItem.tsx index b0158525d52a..a17aa8c04df9 100644 --- a/public/app/core/components/Permissions/PermissionsListItem.tsx +++ b/public/app/core/components/Permissions/PermissionsListItem.tsx @@ -68,7 +68,7 @@ export default observer(({ item, removeItem, permissionChanged, itemIndex, folde
    void; + onSelected: (permission) => void; value: number; disabled: boolean; className?: string; @@ -16,14 +16,14 @@ export interface OptionWithDescription { description: string; } -class DescriptionPicker extends Component { +class DescriptionPicker extends Component { constructor(props) { super(props); this.state = {}; } render() { - const { optionsWithDesc, handlePicked, value, disabled, className } = this.props; + const { optionsWithDesc, onSelected, value, disabled, className } = this.props; return (
    @@ -34,7 +34,7 @@ class DescriptionPicker extends Component { clearable={false} labelKey="label" options={optionsWithDesc} - onChange={handlePicked} + onChange={onSelected} className={`width-7 gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={DescriptionOption} placeholder="Choose" diff --git a/public/app/core/components/Picker/TeamPicker.jest.tsx b/public/app/core/components/Picker/TeamPicker.jest.tsx index 20b7620e0ac6..3db9f7bb4eb4 100644 --- a/public/app/core/components/Picker/TeamPicker.jest.tsx +++ b/public/app/core/components/Picker/TeamPicker.jest.tsx @@ -1,19 +1,23 @@ -import React from 'react'; +import React from 'react'; import renderer from 'react-test-renderer'; -import TeamPicker from './TeamPicker'; +import { TeamPicker } from './TeamPicker'; -const model = { - backendSrv: { - get: () => { - return new Promise((resolve, reject) => {}); - }, +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([]); + }, + }; }, - handlePicked: () => {}, -}; +})); describe('TeamPicker', () => { it('renders correctly', () => { - const tree = renderer.create().toJSON(); + const props = { + onSelected: () => {}, + }; + const tree = renderer.create().toJSON(); expect(tree).toMatchSnapshot(); }); }); diff --git a/public/app/core/components/Picker/TeamPicker.tsx b/public/app/core/components/Picker/TeamPicker.tsx index 2dfff1850dd2..04f108ff8dae 100644 --- a/public/app/core/components/Picker/TeamPicker.tsx +++ b/public/app/core/components/Picker/TeamPicker.tsx @@ -1,18 +1,19 @@ -import React, { Component } from 'react'; +import React, { Component } from 'react'; import Select from 'react-select'; import PickerOption from './PickerOption'; -import withPicker from './withPicker'; import { debounce } from 'lodash'; +import { getBackendSrv } from 'app/core/services/backend_srv'; -export interface IProps { - backendSrv: any; - isLoading: boolean; - toggleLoading: any; - 
handlePicked: (user) => void; +export interface Props { + onSelected: (team: Team) => void; value?: string; className?: string; } +export interface State { + isLoading; +} + export interface Team { id: number; label: string; @@ -20,13 +21,12 @@ export interface Team { avatarUrl: string; } -class TeamPicker extends Component { +export class TeamPicker extends Component { debouncedSearch: any; - backendSrv: any; constructor(props) { super(props); - this.state = {}; + this.state = { isLoading: false }; this.search = this.search.bind(this); this.debouncedSearch = debounce(this.search, 300, { @@ -36,9 +36,9 @@ class TeamPicker extends Component { } search(query?: string) { - const { toggleLoading, backendSrv } = this.props; + const backendSrv = getBackendSrv(); + this.setState({ isLoading: true }); - toggleLoading(true); return backendSrv.get(`/api/teams/search?perpage=10&page=1&query=${query}`).then(result => { const teams = result.teams.map(team => { return { @@ -49,18 +49,18 @@ class TeamPicker extends Component { }; }); - toggleLoading(false); + this.setState({ isLoading: false }); return { options: teams }; }); } render() { - const AsyncComponent = this.state.creatable ? Select.AsyncCreatable : Select.Async; - const { isLoading, handlePicked, value, className } = this.props; + const { onSelected, value, className } = this.props; + const { isLoading } = this.state; return (
    - { loadOptions={this.debouncedSearch} loadingPlaceholder="Loading..." noResultsText="No teams found" - onChange={handlePicked} + onChange={onSelected} className={`gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={PickerOption} - placeholder="Choose" + placeholder="Select a team" value={value} autosize={true} /> @@ -80,5 +80,3 @@ class TeamPicker extends Component { ); } } - -export default withPicker(TeamPicker); diff --git a/public/app/core/components/Picker/UserPicker.jest.tsx b/public/app/core/components/Picker/UserPicker.jest.tsx index 756fa2d9801f..054ca643700e 100644 --- a/public/app/core/components/Picker/UserPicker.jest.tsx +++ b/public/app/core/components/Picker/UserPicker.jest.tsx @@ -1,19 +1,20 @@ -import React from 'react'; +import React from 'react'; import renderer from 'react-test-renderer'; -import UserPicker from './UserPicker'; +import { UserPicker } from './UserPicker'; -const model = { - backendSrv: { - get: () => { - return new Promise((resolve, reject) => {}); - }, +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([]); + }, + }; }, - handlePicked: () => {}, -}; +})); describe('UserPicker', () => { it('renders correctly', () => { - const tree = renderer.create().toJSON(); + const tree = renderer.create( {}} />).toJSON(); expect(tree).toMatchSnapshot(); }); }); diff --git a/public/app/core/components/Picker/UserPicker.tsx b/public/app/core/components/Picker/UserPicker.tsx index 77bf6c1fe15d..e50513c44e1f 100644 --- a/public/app/core/components/Picker/UserPicker.tsx +++ b/public/app/core/components/Picker/UserPicker.tsx @@ -1,18 +1,19 @@ import React, { Component } from 'react'; import Select from 'react-select'; import PickerOption from './PickerOption'; -import withPicker from './withPicker'; import { debounce } from 'lodash'; +import { getBackendSrv } from 'app/core/services/backend_srv'; -export interface IProps { - backendSrv: 
any; - isLoading: boolean; - toggleLoading: any; - handlePicked: (user) => void; +export interface Props { + onSelected: (user: User) => void; value?: string; className?: string; } +export interface State { + isLoading: boolean; +} + export interface User { id: number; label: string; @@ -20,13 +21,12 @@ export interface User { login: string; } -class UserPicker extends Component { +export class UserPicker extends Component { debouncedSearch: any; - backendSrv: any; constructor(props) { super(props); - this.state = {}; + this.state = { isLoading: false }; this.search = this.search.bind(this); this.debouncedSearch = debounce(this.search, 300, { @@ -36,29 +36,34 @@ class UserPicker extends Component { } search(query?: string) { - const { toggleLoading, backendSrv } = this.props; + const backendSrv = getBackendSrv(); - toggleLoading(true); - return backendSrv.get(`/api/org/users?query=${query}&limit=10`).then(result => { - const users = result.map(user => { + this.setState({ isLoading: true }); + + return backendSrv + .get(`/api/org/users?query=${query}&limit=10`) + .then(result => { return { - id: user.userId, - label: `${user.login} - ${user.email}`, - avatarUrl: user.avatarUrl, - login: user.login, + options: result.map(user => ({ + id: user.userId, + label: `${user.login} - ${user.email}`, + avatarUrl: user.avatarUrl, + login: user.login, + })), }; + }) + .finally(() => { + this.setState({ isLoading: false }); }); - toggleLoading(false); - return { options: users }; - }); } render() { - const AsyncComponent = this.state.creatable ? Select.AsyncCreatable : Select.Async; - const { isLoading, handlePicked, value, className } = this.props; + const { value, className } = this.props; + const { isLoading } = this.state; + return (
    - { loadOptions={this.debouncedSearch} loadingPlaceholder="Loading..." noResultsText="No users found" - onChange={handlePicked} + onChange={this.props.onSelected} className={`gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={PickerOption} - placeholder="Choose" + placeholder="Select user" value={value} autosize={true} /> @@ -78,5 +83,3 @@ class UserPicker extends Component { ); } } - -export default withPicker(UserPicker); diff --git a/public/app/core/components/Picker/withPicker.tsx b/public/app/core/components/Picker/withPicker.tsx deleted file mode 100644 index 838ef927c308..000000000000 --- a/public/app/core/components/Picker/withPicker.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import React, { Component } from 'react'; - -export interface IProps { - backendSrv: any; - handlePicked: (data) => void; - value?: string; - className?: string; -} - -export default function withPicker(WrappedComponent) { - return class WithPicker extends Component { - constructor(props) { - super(props); - this.toggleLoading = this.toggleLoading.bind(this); - - this.state = { - isLoading: false, - }; - } - - toggleLoading(isLoading) { - this.setState(prevState => { - return { - ...prevState, - isLoading: isLoading, - }; - }); - } - - render() { - return ; - } - }; -} diff --git a/public/app/core/components/grafana_app.ts b/public/app/core/components/grafana_app.ts index fd2e32db3a72..bd6b69750064 100644 --- a/public/app/core/components/grafana_app.ts +++ b/public/app/core/components/grafana_app.ts @@ -8,7 +8,7 @@ import appEvents from 'app/core/app_events'; import Drop from 'tether-drop'; import { createStore } from 'app/stores/store'; import colors from 'app/core/utils/colors'; -import { BackendSrv } from 'app/core/services/backend_srv'; +import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv'; import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; export class GrafanaCtrl { @@ -24,6 +24,8 @@ export class GrafanaCtrl { 
backendSrv: BackendSrv, datasourceSrv: DatasourceSrv ) { + // sets singleston instances for angular services so react components can access them + setBackendSrv(backendSrv); createStore({ backendSrv, datasourceSrv }); $scope.init = function() { diff --git a/public/app/core/components/team_picker.ts b/public/app/core/components/team_picker.ts deleted file mode 100644 index 228767a76c4a..000000000000 --- a/public/app/core/components/team_picker.ts +++ /dev/null @@ -1,64 +0,0 @@ -import coreModule from 'app/core/core_module'; -import _ from 'lodash'; - -const template = ` - -`; -export class TeamPickerCtrl { - group: any; - teamPicked: any; - debouncedSearchGroups: any; - - /** @ngInject */ - constructor(private backendSrv) { - this.debouncedSearchGroups = _.debounce(this.searchGroups, 500, { - leading: true, - trailing: false, - }); - this.reset(); - } - - reset() { - this.group = { text: 'Choose', value: null }; - } - - searchGroups(query: string) { - return Promise.resolve( - this.backendSrv.get('/api/teams/search?perpage=10&page=1&query=' + query).then(result => { - return _.map(result.teams, ug => { - return { text: ug.name, value: ug }; - }); - }) - ); - } - - onChange(option) { - this.teamPicked({ $group: option.value }); - } -} - -export function teamPicker() { - return { - restrict: 'E', - template: template, - controller: TeamPickerCtrl, - bindToController: true, - controllerAs: 'ctrl', - scope: { - teamPicked: '&', - }, - link: function(scope, elem, attrs, ctrl) { - scope.$on('team-picker-reset', () => { - ctrl.reset(); - }); - }, - }; -} - -coreModule.directive('teamPicker', teamPicker); diff --git a/public/app/core/components/user_picker.ts b/public/app/core/components/user_picker.ts deleted file mode 100644 index 606ded098858..000000000000 --- a/public/app/core/components/user_picker.ts +++ /dev/null @@ -1,71 +0,0 @@ -import coreModule from 'app/core/core_module'; -import _ from 'lodash'; - -const template = ` - -`; -export class UserPickerCtrl { - user: 
any; - debouncedSearchUsers: any; - userPicked: any; - - /** @ngInject */ - constructor(private backendSrv) { - this.reset(); - this.debouncedSearchUsers = _.debounce(this.searchUsers, 500, { - leading: true, - trailing: false, - }); - } - - searchUsers(query: string) { - return Promise.resolve( - this.backendSrv.get('/api/users/search?perpage=10&page=1&query=' + query).then(result => { - return _.map(result.users, user => { - return { text: user.login + ' - ' + user.email, value: user }; - }); - }) - ); - } - - onChange(option) { - this.userPicked({ $user: option.value }); - } - - reset() { - this.user = { text: 'Choose', value: null }; - } -} - -export interface User { - id: number; - name: string; - login: string; - email: string; -} - -export function userPicker() { - return { - restrict: 'E', - template: template, - controller: UserPickerCtrl, - bindToController: true, - controllerAs: 'ctrl', - scope: { - userPicked: '&', - }, - link: function(scope, elem, attrs, ctrl) { - scope.$on('user-picker-reset', () => { - ctrl.reset(); - }); - }, - }; -} - -coreModule.directive('userPicker', userPicker); diff --git a/public/app/core/core.ts b/public/app/core/core.ts index fb7021fe8837..d6088283f3be 100644 --- a/public/app/core/core.ts +++ b/public/app/core/core.ts @@ -44,8 +44,6 @@ import { KeybindingSrv } from './services/keybindingSrv'; import { helpModal } from './components/help/help'; import { JsonExplorer } from './components/json_explorer/json_explorer'; import { NavModelSrv, NavModel } from './nav_model_srv'; -import { userPicker } from './components/user_picker'; -import { teamPicker } from './components/team_picker'; import { geminiScrollbar } from './components/scroll/scroll'; import { pageScrollbar } from './components/scroll/page_scroll'; import { gfPageDirective } from './components/gf_page'; @@ -83,8 +81,6 @@ export { JsonExplorer, NavModelSrv, NavModel, - userPicker, - teamPicker, geminiScrollbar, pageScrollbar, gfPageDirective, diff --git 
a/public/app/core/services/backend_srv.ts b/public/app/core/services/backend_srv.ts index d582b6a3b182..1aeeedef4dd6 100644 --- a/public/app/core/services/backend_srv.ts +++ b/public/app/core/services/backend_srv.ts @@ -368,3 +368,17 @@ export class BackendSrv { } coreModule.service('backendSrv', BackendSrv); + +// +// Code below is to expore the service to react components +// + +let singletonInstance: BackendSrv; + +export function setBackendSrv(instance: BackendSrv) { + singletonInstance = instance; +} + +export function getBackendSrv(): BackendSrv { + return singletonInstance; +} diff --git a/public/app/features/org/all.ts b/public/app/features/org/all.ts index 97e01c53fe33..8872450e3ab5 100644 --- a/public/app/features/org/all.ts +++ b/public/app/features/org/all.ts @@ -5,8 +5,6 @@ import './select_org_ctrl'; import './change_password_ctrl'; import './new_org_ctrl'; import './user_invite_ctrl'; -import './teams_ctrl'; -import './team_details_ctrl'; import './create_team_ctrl'; import './org_api_keys_ctrl'; import './org_details_ctrl'; diff --git a/public/app/features/org/partials/team_details.html b/public/app/features/org/partials/team_details.html deleted file mode 100644 index 3ce851d55463..000000000000 --- a/public/app/features/org/partials/team_details.html +++ /dev/null @@ -1,105 +0,0 @@ - - -
    -

    Team Details

    - - -
    - Name - -
    -
    - - Email - - This is optional and is primarily used for allowing custom team avatars. - - - -
    - -
    - -
    - - -
    - -

    Team Members

    -
    -
    - Add member - - -
    -
    - - - - - - - - - - - - - - - - -
    UsernameEmail
    {{member.login}}{{member.email}} - - - -
    -
    - - This team has no members yet. - -
    - -
    - -
    - -

    Mappings to external groups

    -
    -
    - Add group - -
    -
    - -
    -
    - - - - - - - - - - - - -
    Group
    {{group.groupId}} - - - -
    -
    - - This team has no associated groups yet. - -
    - -
    diff --git a/public/app/features/org/partials/teams.html b/public/app/features/org/partials/teams.html deleted file mode 100755 index e15a15cf5736..000000000000 --- a/public/app/features/org/partials/teams.html +++ /dev/null @@ -1,68 +0,0 @@ - - -
    -
    - -
    - - - - Add Team - -
    - -
    - - - - - - - - - - - - - - - - - - - -
    NameEmailMembers
    - - - -
    -
    - -
    -
      -
    1. - -
    2. -
    -
    - - - No Teams found. - -
    diff --git a/public/app/features/org/specs/team_details_ctrl.jest.ts b/public/app/features/org/specs/team_details_ctrl.jest.ts deleted file mode 100644 index c636de7ec564..000000000000 --- a/public/app/features/org/specs/team_details_ctrl.jest.ts +++ /dev/null @@ -1,42 +0,0 @@ -import '../team_details_ctrl'; -import TeamDetailsCtrl from '../team_details_ctrl'; - -describe('TeamDetailsCtrl', () => { - var backendSrv = { - searchUsers: jest.fn(() => Promise.resolve([])), - get: jest.fn(() => Promise.resolve([])), - post: jest.fn(() => Promise.resolve([])), - }; - - //Team id - var routeParams = { - id: 1, - }; - - var navModelSrv = { - getNav: jest.fn(), - }; - - var teamDetailsCtrl = new TeamDetailsCtrl({ $broadcast: jest.fn() }, backendSrv, routeParams, navModelSrv); - - describe('when user is chosen to be added to team', () => { - beforeEach(() => { - teamDetailsCtrl = new TeamDetailsCtrl({ $broadcast: jest.fn() }, backendSrv, routeParams, navModelSrv); - const userItem = { - id: 2, - login: 'user2', - }; - teamDetailsCtrl.userPicked(userItem); - }); - - it('should parse the result and save to db', () => { - expect(backendSrv.post.mock.calls[0][0]).toBe('/api/teams/1/members'); - expect(backendSrv.post.mock.calls[0][1].userId).toBe(2); - }); - - it('should refresh the list after saving.', () => { - expect(backendSrv.get.mock.calls[0][0]).toBe('/api/teams/1'); - expect(backendSrv.get.mock.calls[1][0]).toBe('/api/teams/1/members'); - }); - }); -}); diff --git a/public/app/features/org/team_details_ctrl.ts b/public/app/features/org/team_details_ctrl.ts deleted file mode 100644 index 6e0fddafa9d9..000000000000 --- a/public/app/features/org/team_details_ctrl.ts +++ /dev/null @@ -1,108 +0,0 @@ -import coreModule from 'app/core/core_module'; -import config from 'app/core/config'; - -export default class TeamDetailsCtrl { - team: Team; - teamMembers: User[] = []; - navModel: any; - teamGroups: TeamGroup[] = []; - newGroupId: string; - isMappingsEnabled: boolean; - - 
/** @ngInject **/ - constructor(private $scope, private backendSrv, private $routeParams, navModelSrv) { - this.navModel = navModelSrv.getNav('cfg', 'teams', 0); - this.userPicked = this.userPicked.bind(this); - this.get = this.get.bind(this); - this.newGroupId = ''; - this.isMappingsEnabled = config.buildInfo.isEnterprise; - this.get(); - } - - get() { - if (this.$routeParams && this.$routeParams.id) { - this.backendSrv.get(`/api/teams/${this.$routeParams.id}`).then(result => { - this.team = result; - }); - - this.backendSrv.get(`/api/teams/${this.$routeParams.id}/members`).then(result => { - this.teamMembers = result; - }); - - if (this.isMappingsEnabled) { - this.backendSrv.get(`/api/teams/${this.$routeParams.id}/groups`).then(result => { - this.teamGroups = result; - }); - } - } - } - - removeTeamMember(teamMember: TeamMember) { - this.$scope.appEvent('confirm-modal', { - title: 'Remove Member', - text: 'Are you sure you want to remove ' + teamMember.login + ' from this group?', - yesText: 'Remove', - icon: 'fa-warning', - onConfirm: () => { - this.removeMemberConfirmed(teamMember); - }, - }); - } - - removeMemberConfirmed(teamMember: TeamMember) { - this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/members/${teamMember.userId}`).then(this.get); - } - - update() { - if (!this.$scope.teamDetailsForm.$valid) { - return; - } - - this.backendSrv.put('/api/teams/' + this.team.id, { - name: this.team.name, - email: this.team.email, - }); - } - - userPicked(user) { - this.backendSrv.post(`/api/teams/${this.$routeParams.id}/members`, { userId: user.id }).then(() => { - this.$scope.$broadcast('user-picker-reset'); - this.get(); - }); - } - - addGroup() { - this.backendSrv.post(`/api/teams/${this.$routeParams.id}/groups`, { groupId: this.newGroupId }).then(() => { - this.get(); - }); - } - - removeGroup(group: TeamGroup) { - this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/groups/${group.groupId}`).then(this.get); - } -} - -export interface TeamGroup { 
- groupId: string; -} - -export interface Team { - id: number; - name: string; - email: string; -} - -export interface User { - id: number; - name: string; - login: string; - email: string; -} - -export interface TeamMember { - userId: number; - name: string; - login: string; -} - -coreModule.controller('TeamDetailsCtrl', TeamDetailsCtrl); diff --git a/public/app/features/org/teams_ctrl.ts b/public/app/features/org/teams_ctrl.ts deleted file mode 100644 index 29317e73d3b3..000000000000 --- a/public/app/features/org/teams_ctrl.ts +++ /dev/null @@ -1,66 +0,0 @@ -import coreModule from 'app/core/core_module'; -import appEvents from 'app/core/app_events'; - -export class TeamsCtrl { - teams: any; - pages = []; - perPage = 50; - page = 1; - totalPages: number; - showPaging = false; - query: any = ''; - navModel: any; - - /** @ngInject */ - constructor(private backendSrv, navModelSrv) { - this.navModel = navModelSrv.getNav('cfg', 'teams', 0); - this.get(); - } - - get() { - this.backendSrv - .get(`/api/teams/search?perpage=${this.perPage}&page=${this.page}&query=${this.query}`) - .then(result => { - this.teams = result.teams; - this.page = result.page; - this.perPage = result.perPage; - this.totalPages = Math.ceil(result.totalCount / result.perPage); - this.showPaging = this.totalPages > 1; - this.pages = []; - - for (var i = 1; i < this.totalPages + 1; i++) { - this.pages.push({ page: i, current: i === this.page }); - } - }); - } - - navigateToPage(page) { - this.page = page.page; - this.get(); - } - - deleteTeam(team) { - appEvents.emit('confirm-modal', { - title: 'Delete', - text: 'Are you sure you want to delete Team ' + team.name + '?', - yesText: 'Delete', - icon: 'fa-warning', - onConfirm: () => { - this.deleteTeamConfirmed(team); - }, - }); - } - - deleteTeamConfirmed(team) { - this.backendSrv.delete('/api/teams/' + team.id).then(this.get.bind(this)); - } - - openTeamModal() { - appEvents.emit('show-modal', { - templateHtml: '', - modalClass: 'modal--narrow', - 
}); - } -} - -coreModule.controller('TeamsCtrl', TeamsCtrl); diff --git a/public/app/routes/routes.ts b/public/app/routes/routes.ts index 568b3438b385..cd1aed549e0b 100644 --- a/public/app/routes/routes.ts +++ b/public/app/routes/routes.ts @@ -5,6 +5,8 @@ import ServerStats from 'app/containers/ServerStats/ServerStats'; import AlertRuleList from 'app/containers/AlertRuleList/AlertRuleList'; import FolderSettings from 'app/containers/ManageDashboards/FolderSettings'; import FolderPermissions from 'app/containers/ManageDashboards/FolderPermissions'; +import TeamPages from 'app/containers/Teams/TeamPages'; +import TeamList from 'app/containers/Teams/TeamList'; /** @ngInject **/ export function setupAngularRoutes($routeProvider, $locationProvider) { @@ -140,19 +142,23 @@ export function setupAngularRoutes($routeProvider, $locationProvider) { controller: 'OrgApiKeysCtrl', }) .when('/org/teams', { - templateUrl: 'public/app/features/org/partials/teams.html', - controller: 'TeamsCtrl', - controllerAs: 'ctrl', + template: '', + resolve: { + roles: () => ['Editor', 'Admin'], + component: () => TeamList, + }, }) .when('/org/teams/new', { templateUrl: 'public/app/features/org/partials/create_team.html', controller: 'CreateTeamCtrl', controllerAs: 'ctrl', }) - .when('/org/teams/edit/:id', { - templateUrl: 'public/app/features/org/partials/team_details.html', - controller: 'TeamDetailsCtrl', - controllerAs: 'ctrl', + .when('/org/teams/edit/:id/:page?', { + template: '', + resolve: { + roles: () => ['Admin'], + component: () => TeamPages, + }, }) .when('/profile', { templateUrl: 'public/app/features/org/partials/profile.html', diff --git a/public/app/stores/NavStore/NavItem.ts b/public/app/stores/NavStore/NavItem.ts index 4521d4291aa7..3e8a2a837b39 100644 --- a/public/app/stores/NavStore/NavItem.ts +++ b/public/app/stores/NavStore/NavItem.ts @@ -1,4 +1,4 @@ -import { types } from 'mobx-state-tree'; +import { types } from 'mobx-state-tree'; export const NavItem = 
types.model('NavItem', { id: types.identifier(types.string), @@ -8,6 +8,7 @@ export const NavItem = types.model('NavItem', { icon: types.optional(types.string, ''), img: types.optional(types.string, ''), active: types.optional(types.boolean, false), + hideFromTabs: types.optional(types.boolean, false), breadcrumbs: types.optional(types.array(types.late(() => Breadcrumb)), []), children: types.optional(types.array(types.late(() => NavItem)), []), }); diff --git a/public/app/stores/NavStore/NavStore.ts b/public/app/stores/NavStore/NavStore.ts index 86348c00487f..c69c32befa85 100644 --- a/public/app/stores/NavStore/NavStore.ts +++ b/public/app/stores/NavStore/NavStore.ts @@ -1,6 +1,7 @@ import _ from 'lodash'; import { types, getEnv } from 'mobx-state-tree'; import { NavItem } from './NavItem'; +import { ITeam } from '../TeamsStore/TeamsStore'; export const NavStore = types .model('NavStore', { @@ -115,4 +116,43 @@ export const NavStore = types self.main = NavItem.create(main); }, + + initTeamPage(team: ITeam, tab: string, isSyncEnabled: boolean) { + let main = { + img: team.avatarUrl, + id: 'team-' + team.id, + subTitle: 'Manage members & settings', + url: '', + text: team.name, + breadcrumbs: [{ title: 'Teams', url: 'org/teams' }], + children: [ + { + active: tab === 'members', + icon: 'gicon gicon-team', + id: 'team-members', + text: 'Members', + url: `org/teams/edit/${team.id}/members`, + }, + { + active: tab === 'settings', + icon: 'fa fa-fw fa-sliders', + id: 'team-settings', + text: 'Settings', + url: `org/teams/edit/${team.id}/settings`, + }, + ], + }; + + if (isSyncEnabled) { + main.children.splice(1, 0, { + active: tab === 'groupsync', + icon: 'fa fa-fw fa-refresh', + id: 'team-settings', + text: 'External group sync', + url: `org/teams/edit/${team.id}/groupsync`, + }); + } + + self.main = NavItem.create(main); + }, })); diff --git a/public/app/stores/RootStore/RootStore.ts b/public/app/stores/RootStore/RootStore.ts index c3bfe75d59c0..8a915d20ef11 100644 
--- a/public/app/stores/RootStore/RootStore.ts +++ b/public/app/stores/RootStore/RootStore.ts @@ -6,6 +6,7 @@ import { AlertListStore } from './../AlertListStore/AlertListStore'; import { ViewStore } from './../ViewStore/ViewStore'; import { FolderStore } from './../FolderStore/FolderStore'; import { PermissionsStore } from './../PermissionsStore/PermissionsStore'; +import { TeamsStore } from './../TeamsStore/TeamsStore'; export const RootStore = types.model({ search: types.optional(SearchStore, { @@ -28,6 +29,9 @@ export const RootStore = types.model({ routeParams: {}, }), folder: types.optional(FolderStore, {}), + teams: types.optional(TeamsStore, { + map: {}, + }), }); type IRootStoreType = typeof RootStore.Type; diff --git a/public/app/stores/TeamsStore/TeamsStore.ts b/public/app/stores/TeamsStore/TeamsStore.ts new file mode 100644 index 000000000000..01cdca895d45 --- /dev/null +++ b/public/app/stores/TeamsStore/TeamsStore.ts @@ -0,0 +1,156 @@ +import { types, getEnv, flow } from 'mobx-state-tree'; + +export const TeamMember = types.model('TeamMember', { + userId: types.identifier(types.number), + teamId: types.number, + avatarUrl: types.string, + email: types.string, + login: types.string, +}); + +type TeamMemberType = typeof TeamMember.Type; +export interface ITeamMember extends TeamMemberType {} + +export const TeamGroup = types.model('TeamGroup', { + groupId: types.identifier(types.string), + teamId: types.number, +}); + +type TeamGroupType = typeof TeamGroup.Type; +export interface ITeamGroup extends TeamGroupType {} + +export const Team = types + .model('Team', { + id: types.identifier(types.number), + name: types.string, + avatarUrl: types.string, + email: types.string, + memberCount: types.number, + search: types.optional(types.string, ''), + members: types.optional(types.map(TeamMember), {}), + groups: types.optional(types.map(TeamGroup), {}), + }) + .views(self => ({ + get filteredMembers() { + let members = this.members.values(); + let regex = new 
RegExp(self.search, 'i'); + return members.filter(member => { + return regex.test(member.login) || regex.test(member.email); + }); + }, + })) + .actions(self => ({ + setName(name: string) { + self.name = name; + }, + + setEmail(email: string) { + self.email = email; + }, + + setSearchQuery(query: string) { + self.search = query; + }, + + update: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + + yield backendSrv.put(`/api/teams/${self.id}`, { + name: self.name, + email: self.email, + }); + }), + + loadMembers: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + const rsp = yield backendSrv.get(`/api/teams/${self.id}/members`); + self.members.clear(); + + for (let member of rsp) { + self.members.set(member.userId.toString(), TeamMember.create(member)); + } + }), + + removeMember: flow(function* load(member: ITeamMember) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.delete(`/api/teams/${self.id}/members/${member.userId}`); + // remove from store map + self.members.delete(member.userId.toString()); + }), + + addMember: flow(function* load(userId: number) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.post(`/api/teams/${self.id}/members`, { userId: userId }); + }), + + loadGroups: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + const rsp = yield backendSrv.get(`/api/teams/${self.id}/groups`); + self.groups.clear(); + + for (let group of rsp) { + self.groups.set(group.groupId, TeamGroup.create(group)); + } + }), + + addGroup: flow(function* load(groupId: string) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.post(`/api/teams/${self.id}/groups`, { groupId: groupId }); + self.groups.set( + groupId, + TeamGroup.create({ + teamId: self.id, + groupId: groupId, + }) + ); + }), + + removeGroup: flow(function* load(groupId: string) { + const backendSrv = getEnv(self).backendSrv; + yield backendSrv.delete(`/api/teams/${self.id}/groups/${groupId}`); + 
self.groups.delete(groupId); + }), + })); + +type TeamType = typeof Team.Type; +export interface ITeam extends TeamType {} + +export const TeamsStore = types + .model('TeamsStore', { + map: types.map(Team), + search: types.optional(types.string, ''), + }) + .views(self => ({ + get filteredTeams() { + let teams = this.map.values(); + let regex = new RegExp(self.search, 'i'); + return teams.filter(team => { + return regex.test(team.name); + }); + }, + })) + .actions(self => ({ + loadTeams: flow(function* load() { + const backendSrv = getEnv(self).backendSrv; + const rsp = yield backendSrv.get('/api/teams/search/', { perpage: 50, page: 1 }); + self.map.clear(); + + for (let team of rsp.teams) { + self.map.set(team.id.toString(), Team.create(team)); + } + }), + + setSearchQuery(query: string) { + self.search = query; + }, + + loadById: flow(function* load(id: string) { + if (self.map.has(id)) { + return; + } + + const backendSrv = getEnv(self).backendSrv; + const team = yield backendSrv.get(`/api/teams/${id}`); + self.map.set(id, Team.create(team)); + }), + })); diff --git a/public/sass/components/_gf-form.scss b/public/sass/components/_gf-form.scss index 756d88ee9356..0de386f3f683 100644 --- a/public/sass/components/_gf-form.scss +++ b/public/sass/components/_gf-form.scss @@ -403,9 +403,9 @@ select.gf-form-input ~ .gf-form-help-icon { .cta-form { position: relative; - padding: 1rem; + padding: 1.5rem; background-color: $empty-list-cta-bg; - margin-bottom: 1rem; + margin-bottom: 2rem; border-top: 3px solid $green; } diff --git a/public/test/jest-shim.ts b/public/test/jest-shim.ts index 80c4bb3d21b1..dbf9ac4be50a 100644 --- a/public/test/jest-shim.ts +++ b/public/test/jest-shim.ts @@ -1,6 +1,17 @@ declare var global: NodeJS.Global; -(global).requestAnimationFrame = (callback) => { +(global).requestAnimationFrame = callback => { setTimeout(callback, 0); }; +(Promise.prototype).finally = function(onFinally) { + return this.then( + /* onFulfilled */ + res => 
Promise.resolve(onFinally()).then(() => res), + /* onRejected */ + err => + Promise.resolve(onFinally()).then(() => { + throw err; + }) + ); +}; From 9b50c9038b7697abc4e72191c18326af976f2dc8 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Thu, 12 Jul 2018 03:23:38 +0900 Subject: [PATCH 118/263] skip backend request if extended statistics is invalid. (#12495) * check extended statistics pattern * check extended statistics pattern * Revert "check extended statistics pattern" This reverts commit 52c7b1a972636d5f5729e64ae5e00e6fae329257. * add test * fix test --- .../datasource/cloudwatch/datasource.ts | 8 ++++++++ .../cloudwatch/specs/datasource.jest.ts | 20 +++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts index 391f65bd7aed..00ce1bfa287b 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -39,6 +39,14 @@ export default class CloudWatchDatasource { item.dimensions = this.convertDimensionFormat(item.dimensions, options.scopedVars); item.period = String(this.getPeriod(item, options)); // use string format for period in graph query, and alerting + // valid ExtendedStatistics is like p90.00, check the pattern + let hasInvalidStatistics = item.statistics.some(s => { + return s.indexOf('p') === 0 && !/p\d{2}\.\d{2}/.test(s); + }); + if (hasInvalidStatistics) { + throw { message: 'Invalid extended statistics' }; + } + return _.extend( { refId: item.refId, diff --git a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts index 2dc6e57b1aa2..a89680086617 100644 --- a/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/cloudwatch/specs/datasource.jest.ts @@ -121,6 +121,26 @@ describe('CloudWatchDatasource', function() { }); }); + 
it('should cancel query for invalid extended statistics', function () { + var query = { + range: { from: 'now-1h', to: 'now' }, + rangeRaw: { from: 1483228800, to: 1483232400 }, + targets: [ + { + region: 'us-east-1', + namespace: 'AWS/EC2', + metricName: 'CPUUtilization', + dimensions: { + InstanceId: 'i-12345678', + }, + statistics: ['pNN.NN'], + period: '60s', + }, + ], + }; + expect(ctx.ds.query.bind(ctx.ds, query)).toThrow(/Invalid extended statistics/); + }); + it('should return series list', function(done) { ctx.ds.query(query).then(function(result) { expect(result.data[0].target).toBe(response.results.A.series[0].name); From 81e62e105143f9493169d86a20bc2dd0766dab38 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Thu, 12 Jul 2018 13:16:41 +0200 Subject: [PATCH 119/263] Fix freezing browser when loading plugin - broken since 4d2dd2209 - `*` was previously working as a path matcher, but freezes browser when used with new cache-busting plugin loader - changed matcher to be `/*` --- public/app/features/plugins/plugin_loader.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/features/plugins/plugin_loader.ts b/public/app/features/plugins/plugin_loader.ts index 20023e27b5c9..641b51007034 100644 --- a/public/app/features/plugins/plugin_loader.ts +++ b/public/app/features/plugins/plugin_loader.ts @@ -56,7 +56,7 @@ System.config({ css: 'vendor/plugin-css/css.js', }, meta: { - '*': { + '/*': { esModule: true, authorization: true, loader: 'plugin-loader', From 7361d352bf0fa503cc9775d5ceba39a2b6775e9b Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 12 Jul 2018 15:38:41 +0200 Subject: [PATCH 120/263] Add comments --- public/app/core/components/scroll/page_scroll.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/public/app/core/components/scroll/page_scroll.ts b/public/app/core/components/scroll/page_scroll.ts index 0cb36eba9144..b6603f06175e 100644 --- a/public/app/core/components/scroll/page_scroll.ts +++ 
b/public/app/core/components/scroll/page_scroll.ts @@ -29,10 +29,12 @@ export function pageScrollbar() { scope.$on('$routeChangeSuccess', () => { lastPos = 0; elem[0].scrollTop = 0; + // Focus page to enable scrolling by keyboard elem[0].focus({ preventScroll: true }); }); elem[0].tabIndex = -1; + // Focus page to enable scrolling by keyboard elem[0].focus({ preventScroll: true }); }, }; From 756c08e713ad2d1be7aad681aee6db7c85d8791f Mon Sep 17 00:00:00 2001 From: Shane Date: Fri, 13 Jul 2018 02:56:37 -0400 Subject: [PATCH 121/263] changed you to your (#12590) --- docs/sources/reference/templating.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index 8341b9770bda..efe9db61e3de 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -11,7 +11,7 @@ weight = 1 # Variables Variables allows for more interactive and dynamic dashboards. Instead of hard-coding things like server, application -and sensor name in you metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of +and sensor name in your metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data being displayed in your dashboard. {{< docs-imagebox img="/img/docs/v50/variables_dashboard.png" >}} From d06b26de262c1dccb9976d506fdc8e6f39b16118 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Fri, 13 Jul 2018 09:09:36 +0200 Subject: [PATCH 122/263] Explore Datasource selector Adds a datasource selector to the Explore UI. Only datasource plugins that have `explore: true` in their `plugin.json` can be selected. 
- adds datasource selector (based on react-select) to explore UI - adds getExploreSources to datasource service - new `explore` flag in datasource plugins model - Prometheus plugin enabled explore --- pkg/plugins/datasource_plugin.go | 1 + public/app/containers/Explore/Explore.tsx | 90 +++++++++++++++---- public/app/features/plugins/datasource_srv.ts | 33 ++++--- .../plugins/specs/datasource_srv.jest.ts | 30 ++++++- .../plugins/datasource/prometheus/plugin.json | 30 +++++-- public/sass/pages/_explore.scss | 4 + 6 files changed, 147 insertions(+), 41 deletions(-) diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index 2fec6acbf544..cef35a2e7d96 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -22,6 +22,7 @@ type DataSourcePlugin struct { Annotations bool `json:"annotations"` Metrics bool `json:"metrics"` Alerting bool `json:"alerting"` + Explore bool `json:"explore"` QueryOptions map[string]bool `json:"queryOptions,omitempty"` BuiltIn bool `json:"builtIn,omitempty"` Mixed bool `json:"mixed,omitempty"` diff --git a/public/app/containers/Explore/Explore.tsx b/public/app/containers/Explore/Explore.tsx index deebe84f2c8b..90bf09415720 100644 --- a/public/app/containers/Explore/Explore.tsx +++ b/public/app/containers/Explore/Explore.tsx @@ -1,16 +1,17 @@ import React from 'react'; import { hot } from 'react-hot-loader'; +import Select from 'react-select'; + import colors from 'app/core/utils/colors'; import TimeSeries from 'app/core/time_series2'; +import { decodePathComponent } from 'app/core/utils/location_util'; import ElapsedTime from './ElapsedTime'; import QueryRows from './QueryRows'; import Graph from './Graph'; import Table from './Table'; import TimePicker, { DEFAULT_RANGE } from './TimePicker'; -import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; import { buildQueryOptions, ensureQueries, generateQueryKey, hasQuery } from './utils/query'; -import { decodePathComponent } from 
'app/core/utils/location_util'; function makeTimeSeriesList(dataList, options) { return dataList.map((seriesData, index) => { @@ -46,7 +47,8 @@ function parseInitialState(initial) { interface IExploreState { datasource: any; datasourceError: any; - datasourceLoading: any; + datasourceLoading: boolean | null; + datasourceMissing: boolean; graphResult: any; latency: number; loading: any; @@ -61,15 +63,14 @@ interface IExploreState { // @observer export class Explore extends React.Component { - datasourceSrv: DatasourceSrv; - constructor(props) { super(props); const { range, queries } = parseInitialState(props.routeParams.initial); this.state = { datasource: null, datasourceError: null, - datasourceLoading: true, + datasourceLoading: null, + datasourceMissing: false, graphResult: null, latency: 0, loading: false, @@ -85,19 +86,43 @@ export class Explore extends React.Component { } async componentDidMount() { - const datasource = await this.props.datasourceSrv.get(); - const testResult = await datasource.testDatasource(); - if (testResult.status === 'success') { - this.setState({ datasource, datasourceError: null, datasourceLoading: false }, () => this.handleSubmit()); + const { datasourceSrv } = this.props; + if (!datasourceSrv) { + throw new Error('No datasource service passed as props.'); + } + const datasources = datasourceSrv.getExploreSources(); + if (datasources.length > 0) { + this.setState({ datasourceLoading: true }); + // Try default datasource, otherwise get first + let datasource = await datasourceSrv.get(); + if (!datasource.meta.explore) { + datasource = await datasourceSrv.get(datasources[0].name); + } + this.setDatasource(datasource); } else { - this.setState({ datasource: null, datasourceError: testResult.message, datasourceLoading: false }); + this.setState({ datasourceMissing: true }); } } componentDidCatch(error) { + this.setState({ datasourceError: error }); console.error(error); } + async setDatasource(datasource) { + try { + const testResult = 
await datasource.testDatasource(); + if (testResult.status === 'success') { + this.setState({ datasource, datasourceError: null, datasourceLoading: false }, () => this.handleSubmit()); + } else { + this.setState({ datasource: datasource, datasourceError: testResult.message, datasourceLoading: false }); + } + } catch (error) { + const message = (error && error.statusText) || error; + this.setState({ datasource: datasource, datasourceError: message, datasourceLoading: false }); + } + } + handleAddQueryRow = index => { const { queries } = this.state; const nextQueries = [ @@ -108,6 +133,18 @@ export class Explore extends React.Component { this.setState({ queries: nextQueries }); }; + handleChangeDatasource = async option => { + this.setState({ + datasource: null, + datasourceError: null, + datasourceLoading: true, + graphResult: null, + tableResult: null, + }); + const datasource = await this.props.datasourceSrv.get(option.value); + this.setDatasource(datasource); + }; + handleChangeQuery = (query, index) => { const { queries } = this.state; const nextQuery = { @@ -226,11 +263,12 @@ export class Explore extends React.Component { }; render() { - const { position, split } = this.props; + const { datasourceSrv, position, split } = this.props; const { datasource, datasourceError, datasourceLoading, + datasourceMissing, graphResult, latency, loading, @@ -247,6 +285,12 @@ export class Explore extends React.Component { const graphButtonActive = showingBoth || showingGraph ? 'active' : ''; const tableButtonActive = showingBoth || showingTable ? 'active' : ''; const exploreClass = split ? 'explore explore-split' : 'explore'; + const datasources = datasourceSrv.getExploreSources().map(ds => ({ + value: ds.name, + label: ds.name, + })); + const selectedDatasource = datasource ? datasource.name : undefined; + return (
    @@ -264,6 +308,18 @@ export class Explore extends React.Component {
    )} + {!datasourceMissing ? ( +
    + + +
    From ae935bf08b14c1457b4f96580048003c494b8063 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 11:06:30 +0200 Subject: [PATCH 157/263] Add jest test file --- .../panel/graph/specs/graph_ctrl.jest.ts | 81 +++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts new file mode 100644 index 000000000000..bd5a69f28dd5 --- /dev/null +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -0,0 +1,81 @@ +// import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common'; + +import moment from 'moment'; +import { GraphCtrl } from '../module'; + +describe('GraphCtrl', function() { + let ctx = {}; + + beforeEach(() => { + ctx.ctrl = new GraphCtrl({}, {}, {}); + }); + + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + + // beforeEach(ctx.providePhase()); + // beforeEach(ctx.createPanelController(GraphCtrl)); + beforeEach(() => { + ctx.ctrl.annotationsPromise = Promise.resolve({}); + ctx.ctrl.updateTimeRange(); + }); + + describe('when time series are outside range', function() { + beforeEach(function() { + var data = [ + { + target: 'test.cpu1', + datapoints: [[45, 1234567890], [60, 1234567899]], + }, + ]; + + ctx.ctrl.range = { from: moment().valueOf(), to: moment().valueOf() }; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsOutside', function() { + expect(ctx.ctrl.dataWarning.title).toBe('Data points outside time range'); + }); + }); + + describe('when time series are inside range', function() { + beforeEach(function() { + var range = { + from: moment() + .subtract(1, 'days') + .valueOf(), 
+ to: moment().valueOf(), + }; + + var data = [ + { + target: 'test.cpu1', + datapoints: [[45, range.from + 1000], [60, range.from + 10000]], + }, + ]; + + ctx.ctrl.range = range; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsOutside', function() { + expect(ctx.ctrl.dataWarning).toBe(null); + }); + }); + + describe('datapointsCount given 2 series', function() { + beforeEach(function() { + var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }]; + ctx.ctrl.onDataReceived(data); + }); + + it('should set datapointsCount warning', function() { + expect(ctx.ctrl.dataWarning.title).toBe('No data points'); + }); + }); +}); From ee2eda615e4eac174907752911f486ebc5310ef9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20W=C4=99grzynek?= Date: Mon, 23 Jul 2018 12:07:54 +0200 Subject: [PATCH 158/263] Update kbn.ts --- public/app/core/utils/kbn.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index 463025567cd1..4fc4829811f1 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -449,6 +449,7 @@ kbn.valueFormats.currencyNOK = kbn.formatBuilders.currency('kr'); kbn.valueFormats.currencySEK = kbn.formatBuilders.currency('kr'); kbn.valueFormats.currencyCZK = kbn.formatBuilders.currency('czk'); kbn.valueFormats.currencyCHF = kbn.formatBuilders.currency('CHF'); +kbn.valueFormats.currencyPLN = kbn.formatBuilders.currency('zł'); // Data (Binary) kbn.valueFormats.bits = kbn.formatBuilders.binarySIPrefix('b'); @@ -880,6 +881,7 @@ kbn.getUnitFormats = function() { { text: 'Swedish Krona (kr)', value: 'currencySEK' }, { text: 'Czech koruna (czk)', value: 'currencyCZK' }, { text: 'Swiss franc (CHF)', value: 'currencyCHF' }, + { text: 'Polish Złoty (PLN)', value: 'currencyPLN' }, ], }, { From 0fa98a812bac189c107a17ba7c1cb15050800fda Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 23 Jul 2018 13:13:18 +0200 Subject: [PATCH 
159/263] changelog: add notes about closing #12691 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e53b3a904a32..5cf8602824ba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,7 @@ * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) +* **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) # 5.2.2 (unreleased) From ed8568f0dffcad022309e48e4b837ecd0414b69d Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 13:38:16 +0200 Subject: [PATCH 160/263] Add graph_ctrl jest --- .../panel/graph/specs/graph_ctrl.jest.ts | 42 ++++++---- .../panel/graph/specs/graph_ctrl_specs.ts | 78 ------------------- 2 files changed, 29 insertions(+), 91 deletions(-) delete mode 100644 public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts index bd5a69f28dd5..a778697527f4 100644 --- a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -1,25 +1,41 @@ -// import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common'; - import moment from 'moment'; import { GraphCtrl } from '../module'; +jest.mock('../graph', () => 
({})); + describe('GraphCtrl', function() { + let injector = { + get: () => { + return { + timeRange: () => { + return { + from: '', + to: '', + }; + }, + }; + }, + }; + + let scope = { + $on: function() {}, + }; + + GraphCtrl.prototype.panel = { + events: { + on: function() {}, + }, + gridPos: { + w: 100, + }, + }; + let ctx = {}; beforeEach(() => { - ctx.ctrl = new GraphCtrl({}, {}, {}); + ctx.ctrl = new GraphCtrl(scope, injector, {}); }); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach( - // angularMocks.module(function($compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - - // beforeEach(ctx.providePhase()); - // beforeEach(ctx.createPanelController(GraphCtrl)); beforeEach(() => { ctx.ctrl.annotationsPromise = Promise.resolve({}); ctx.ctrl.updateTimeRange(); diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts b/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts deleted file mode 100644 index d5cefb345cf2..000000000000 --- a/public/app/plugins/panel/graph/specs/graph_ctrl_specs.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common'; - -import moment from 'moment'; -import { GraphCtrl } from '../module'; -import helpers from '../../../../../test/specs/helpers'; - -describe('GraphCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.services')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase()); - beforeEach(ctx.createPanelController(GraphCtrl)); - beforeEach(() => { - ctx.ctrl.annotationsPromise = Promise.resolve({}); - ctx.ctrl.updateTimeRange(); - }); - - describe('when time series are outside range', function() { - 
beforeEach(function() { - var data = [ - { - target: 'test.cpu1', - datapoints: [[45, 1234567890], [60, 1234567899]], - }, - ]; - - ctx.ctrl.range = { from: moment().valueOf(), to: moment().valueOf() }; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsOutside', function() { - expect(ctx.ctrl.dataWarning.title).to.be('Data points outside time range'); - }); - }); - - describe('when time series are inside range', function() { - beforeEach(function() { - var range = { - from: moment() - .subtract(1, 'days') - .valueOf(), - to: moment().valueOf(), - }; - - var data = [ - { - target: 'test.cpu1', - datapoints: [[45, range.from + 1000], [60, range.from + 10000]], - }, - ]; - - ctx.ctrl.range = range; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsOutside', function() { - expect(ctx.ctrl.dataWarning).to.be(null); - }); - }); - - describe('datapointsCount given 2 series', function() { - beforeEach(function() { - var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }]; - ctx.ctrl.onDataReceived(data); - }); - - it('should set datapointsCount warning', function() { - expect(ctx.ctrl.dataWarning.title).to.be('No data points'); - }); - }); -}); From 529883b61d43fefac03b578e1fe86b4259e9c2de Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 13:39:32 +0200 Subject: [PATCH 161/263] Change to arrow functions --- public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts index a778697527f4..788ca1840ba5 100644 --- a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -18,12 +18,12 @@ describe('GraphCtrl', function() { }; let scope = { - $on: function() {}, + $on: () => {}, }; GraphCtrl.prototype.panel = { events: { - on: function() {}, + on: 
() => {}, }, gridPos: { w: 100, From 46e31621b071e36f658788c5b8f9c9ab11ca1aab Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 14:28:17 +0200 Subject: [PATCH 162/263] Add jest file --- .../influxdb/specs/query_ctrl.jest.ts | 211 ++++++++++++++++++ 1 file changed, 211 insertions(+) create mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts new file mode 100644 index 000000000000..e4dd5b226f41 --- /dev/null +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -0,0 +1,211 @@ +import '../query_ctrl'; +import 'app/core/services/segment_srv'; +// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +// import helpers from 'test/specs/helpers'; +import { InfluxQueryCtrl } from '../query_ctrl'; + +describe('InfluxDBQueryCtrl', function() { + let uiSegmentSrv = { + newPlusButton: () => {}, + }; + + let ctx = { + dataSource: { + metricFindQuery: jest.fn(() => Promise.resolve([])), + }, + }; + + InfluxQueryCtrl.prototype.panelCtrl = { + panel: { + targets: [{}], + }, + }; + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + // beforeEach(ctx.providePhase()); + + // beforeEach( + // angularMocks.inject(($rootScope, $controller, $q) => { + // ctx.$q = $q; + // ctx.scope = $rootScope.$new(); + // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + // ctx.target = { target: {} }; + // ctx.panelCtrl = { + // panel: { + // targets: [ctx.target], + // }, + // }; + // ctx.panelCtrl.refresh = sinon.spy(); + // ctx.ctrl = $controller( + // InfluxQueryCtrl, + // { 
$scope: ctx.scope }, + // { + // panelCtrl: ctx.panelCtrl, + // target: ctx.target, + // datasource: ctx.datasource, + // } + // ); + // }) + // ); + + beforeEach(() => { + ctx.ctrl = new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); + }); + + describe('init', function() { + it('should init tagSegments', function() { + expect(ctx.ctrl.tagSegments.length).toBe(1); + }); + + it('should init measurementSegment', function() { + expect(ctx.ctrl.measurementSegment.value).toBe('select measurement'); + }); + }); + + describe('when first tag segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + }); + + it('should update tag key', function() { + expect(ctx.ctrl.target.tags[0].key).toBe('asd'); + expect(ctx.ctrl.tagSegments[0].type).toBe('key'); + }); + + it('should add tagSegments', function() { + expect(ctx.ctrl.tagSegments.length).toBe(3); + }); + }); + + describe('when last tag value segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + }); + + it('should update tag value', function() { + expect(ctx.ctrl.target.tags[0].value).toBe('server1'); + }); + + it('should set tag operator', function() { + expect(ctx.ctrl.target.tags[0].operator).toBe('='); + }); + + it('should add plus button for another filter', function() { + expect(ctx.ctrl.tagSegments[3].fake).toBe(true); + }); + }); + + describe('when last tag value segment is updated to regex', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); + }); + + it('should update operator', function() { + expect(ctx.ctrl.tagSegments[1].value).toBe('=~'); + expect(ctx.ctrl.target.tags[0].operator).toBe('=~'); + }); + }); + + describe('when second tag key is added', function() 
{ + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + }); + + it('should update tag key', function() { + expect(ctx.ctrl.target.tags[1].key).toBe('key2'); + }); + + it('should add AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).toBe('AND'); + }); + }); + + describe('when condition is changed', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); + }); + + it('should update tag condition', function() { + expect(ctx.ctrl.target.tags[1].condition).toBe('OR'); + }); + + it('should update AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).toBe('OR'); + expect(ctx.ctrl.tagSegments.length).toBe(7); + }); + }); + + describe('when deleting first tag filter after value is selected', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); + }); + + it('should remove tags', function() { + expect(ctx.ctrl.target.tags.length).toBe(0); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(1); + expect(ctx.ctrl.tagSegments[0].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + 
ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value after second tag filter is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).toBe(4); + expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); + }); + }); +}); From 6b6a23ff6a24c62955b48c9794c0b99023ceb608 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 12 Jul 2018 15:32:32 +0200 Subject: [PATCH 163/263] Add support for interval in query variable Add range to scopedVars Add basic tests and extract function for range vars Add support for range query variable in createQuery Template vars squash --- 
.../datasource/prometheus/datasource.ts | 20 ++++++++- .../prometheus/specs/datasource.jest.ts | 43 ++++++++++++++++++- 2 files changed, 60 insertions(+), 3 deletions(-) diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts index 69ce6f440c5d..75a946d6f368 100644 --- a/public/app/plugins/datasource/prometheus/datasource.ts +++ b/public/app/plugins/datasource/prometheus/datasource.ts @@ -196,13 +196,14 @@ export class PrometheusDatasource { var intervalFactor = target.intervalFactor || 1; // Adjust the interval to take into account any specified minimum and interval factor plus Prometheus limits var adjustedInterval = this.adjustInterval(interval, minInterval, range, intervalFactor); - var scopedVars = options.scopedVars; + var scopedVars = { ...options.scopedVars, ...this.getRangeScopedVars() }; // If the interval was adjusted, make a shallow copy of scopedVars with updated interval vars if (interval !== adjustedInterval) { interval = adjustedInterval; scopedVars = Object.assign({}, options.scopedVars, { __interval: { text: interval + 's', value: interval + 's' }, __interval_ms: { text: interval * 1000, value: interval * 1000 }, + ...this.getRangeScopedVars(), }); } query.step = interval; @@ -285,11 +286,26 @@ export class PrometheusDatasource { return this.$q.when([]); } - let interpolated = this.templateSrv.replace(query, {}, this.interpolateQueryExpr); + let scopedVars = { + __interval: { text: this.interval, value: this.interval }, + __interval_ms: { text: kbn.interval_to_ms(this.interval), value: kbn.interval_to_ms(this.interval) }, + ...this.getRangeScopedVars(), + }; + let interpolated = this.templateSrv.replace(query, scopedVars, this.interpolateQueryExpr); var metricFindQuery = new PrometheusMetricFindQuery(this, interpolated, this.timeSrv); return metricFindQuery.process(); } + getRangeScopedVars() { + let range = this.timeSrv.timeRange(); + let msRange = range.to.diff(range.from); + 
let regularRange = kbn.secondsToHms(msRange / 1000); + return { + __range_ms: { text: msRange, value: msRange }, + __range: { text: regularRange, value: regularRange }, + }; + } + annotationQuery(options) { var annotation = options.annotation; var expr = annotation.expr || ''; diff --git a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts index 15798a33cd20..b8b2b50f5909 100644 --- a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts @@ -2,6 +2,7 @@ import _ from 'lodash'; import moment from 'moment'; import q from 'q'; import { alignRange, PrometheusDatasource, prometheusSpecialRegexEscape, prometheusRegularEscape } from '../datasource'; +jest.mock('../metric_find_query'); describe('PrometheusDatasource', () => { let ctx: any = {}; @@ -18,7 +19,14 @@ describe('PrometheusDatasource', () => { ctx.templateSrvMock = { replace: a => a, }; - ctx.timeSrvMock = {}; + ctx.timeSrvMock = { + timeRange: () => { + return { + from: moment(1531468681), + to: moment(1531489712), + }; + }, + }; beforeEach(() => { ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock); @@ -204,4 +212,37 @@ describe('PrometheusDatasource', () => { expect(prometheusSpecialRegexEscape('+looking$glass?')).toEqual('\\\\+looking\\\\$glass\\\\?'); }); }); + + describe('metricFindQuery', () => { + beforeEach(() => { + let query = 'query_result(topk(5,rate(http_request_duration_microseconds_count[$__interval])))'; + ctx.templateSrvMock.replace = jest.fn(); + ctx.timeSrvMock.timeRange = () => { + return { + from: moment(1531468681), + to: moment(1531489712), + }; + }; + ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock); + ctx.ds.metricFindQuery(query); + }); + + it('should call templateSrv.replace with scopedVars', () => 
{ + expect(ctx.templateSrvMock.replace.mock.calls[0][1]).toBeDefined(); + }); + + it('should have the correct range and range_ms', () => { + let range = ctx.templateSrvMock.replace.mock.calls[0][1].__range; + let rangeMs = ctx.templateSrvMock.replace.mock.calls[0][1].__range_ms; + expect(range).toEqual({ text: '21s', value: '21s' }); + expect(rangeMs).toEqual({ text: 21031, value: 21031 }); + }); + + it('should pass the default interval value', () => { + let interval = ctx.templateSrvMock.replace.mock.calls[0][1].__interval; + let intervalMs = ctx.templateSrvMock.replace.mock.calls[0][1].__interval_ms; + expect(interval).toEqual({ text: '15s', value: '15s' }); + expect(intervalMs).toEqual({ text: 15000, value: 15000 }); + }); + }); }); From bb0af52d34b201a960d3ace19a54e1b44be8748b Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 14:54:58 +0200 Subject: [PATCH 164/263] Figuring out why it doesn't initialize --- .../app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index e4dd5b226f41..c3b8d3ae20d6 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -16,8 +16,9 @@ describe('InfluxDBQueryCtrl', function() { }; InfluxQueryCtrl.prototype.panelCtrl = { + target: { target: {} }, panel: { - targets: [{}], + targets: [this.target], }, }; From 76bc02b3fae41bae9b5a3643a503566332d4c267 Mon Sep 17 00:00:00 2001 From: David Date: Mon, 23 Jul 2018 14:58:11 +0200 Subject: [PATCH 165/263] Update CHANGELOG.md Added #12597 --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5cf8602824ba..58570c89c186 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ * **Table**: Make table sorting stable when null values 
exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484) +* **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597) * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) From 816ee82d2695157cbd969f43623ae686b683f08d Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 15:25:59 +0200 Subject: [PATCH 166/263] Add docs about global variables in query template variables --- docs/sources/features/datasources/prometheus.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 4ff0baee1085..190220fb0f17 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -75,6 +75,9 @@ Name | Description For details of *metric names*, *label names* and *label values* are please refer to the [Prometheus 
documentation](http://prometheus.io/docs/concepts/data_model/#metric-names-and-labels). + +It is possible to use some global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. + ### Using variables in queries There are two syntaxes: From 70575c8f7816f90b074d7f65226b70e334786958 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 15:34:03 +0200 Subject: [PATCH 167/263] Add templating docs for --- docs/sources/reference/templating.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index efe9db61e3de..08a142d36369 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -273,6 +273,9 @@ The `$__timeFilter` is used in the MySQL data source. This variable is only available in the Singlestat panel and can be used in the prefix or suffix fields on the Options tab. The variable will be replaced with the series name or alias. +### The $__range Variable +Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. It has a millisecond representation called `$__range_ms`. + ## Repeating Panels Template variables can be very useful to dynamically change your queries across a whole dashboard. 
If you want From 47bec0fd91f42cb28b87c0130088ed667149cb70 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 23 Jul 2018 15:42:47 +0200 Subject: [PATCH 168/263] Fix requested changes --- .../panel/graph/specs/graph_ctrl.jest.ts | 23 ++++++++----------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts index 788ca1840ba5..3ebcf6cdf313 100644 --- a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -3,7 +3,7 @@ import { GraphCtrl } from '../module'; jest.mock('../graph', () => ({})); -describe('GraphCtrl', function() { +describe('GraphCtrl', () => { let injector = { get: () => { return { @@ -34,15 +34,12 @@ describe('GraphCtrl', function() { beforeEach(() => { ctx.ctrl = new GraphCtrl(scope, injector, {}); - }); - - beforeEach(() => { ctx.ctrl.annotationsPromise = Promise.resolve({}); ctx.ctrl.updateTimeRange(); }); - describe('when time series are outside range', function() { - beforeEach(function() { + describe('when time series are outside range', () => { + beforeEach(() => { var data = [ { target: 'test.cpu1', @@ -54,13 +51,13 @@ describe('GraphCtrl', function() { ctx.ctrl.onDataReceived(data); }); - it('should set datapointsOutside', function() { + it('should set datapointsOutside', () => { expect(ctx.ctrl.dataWarning.title).toBe('Data points outside time range'); }); }); - describe('when time series are inside range', function() { - beforeEach(function() { + describe('when time series are inside range', () => { + beforeEach(() => { var range = { from: moment() .subtract(1, 'days') @@ -79,18 +76,18 @@ describe('GraphCtrl', function() { ctx.ctrl.onDataReceived(data); }); - it('should set datapointsOutside', function() { + it('should set datapointsOutside', () => { expect(ctx.ctrl.dataWarning).toBe(null); }); }); - describe('datapointsCount given 2 series', 
function() { - beforeEach(function() { + describe('datapointsCount given 2 series', () => { + beforeEach(() => { var data = [{ target: 'test.cpu1', datapoints: [] }, { target: 'test.cpu2', datapoints: [] }]; ctx.ctrl.onDataReceived(data); }); - it('should set datapointsCount warning', function() { + it('should set datapointsCount warning', () => { expect(ctx.ctrl.dataWarning.title).toBe('No data points'); }); }); From 6b071054a31cbf55eb7e62499b91ece784e4432f Mon Sep 17 00:00:00 2001 From: srid12 Date: Mon, 23 Jul 2018 19:53:26 +0530 Subject: [PATCH 169/263] changing callback fn into arrow functions for correct usage of this (#12673) --- public/app/plugins/datasource/opentsdb/datasource.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/public/app/plugins/datasource/opentsdb/datasource.ts b/public/app/plugins/datasource/opentsdb/datasource.ts index 39ad6c64e114..07ec4a794eca 100644 --- a/public/app/plugins/datasource/opentsdb/datasource.ts +++ b/public/app/plugins/datasource/opentsdb/datasource.ts @@ -480,17 +480,17 @@ export default class OpenTsDatasource { mapMetricsToTargets(metrics, options, tsdbVersion) { var interpolatedTagValue, arrTagV; - return _.map(metrics, function(metricData) { + return _.map(metrics, metricData => { if (tsdbVersion === 3) { return metricData.query.index; } else { - return _.findIndex(options.targets, function(target) { + return _.findIndex(options.targets, target => { if (target.filters && target.filters.length > 0) { return target.metric === metricData.metric; } else { return ( target.metric === metricData.metric && - _.every(target.tags, function(tagV, tagK) { + _.every(target.tags, (tagV, tagK) => { interpolatedTagValue = this.templateSrv.replace(tagV, options.scopedVars, 'pipe'); arrTagV = interpolatedTagValue.split('|'); return _.includes(arrTagV, metricData.tags[tagK]) || interpolatedTagValue === '*'; From d9bf89438325c01a0fe5f3205b4cefff25930c40 Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Tue, 24 
Jul 2018 16:58:48 +0900 Subject: [PATCH 170/263] return 400 if user input error --- pkg/api/metrics.go | 2 +- pkg/tsdb/cloudwatch/cloudwatch.go | 21 +++++++++++++++++---- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go index c1b8ffe595e3..00ad25ab8c2f 100644 --- a/pkg/api/metrics.go +++ b/pkg/api/metrics.go @@ -52,7 +52,7 @@ func QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response { if res.Error != nil { res.ErrorString = res.Error.Error() resp.Message = res.ErrorString - statusCode = 500 + statusCode = 400 } } diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 38fbac3aa292..4af73fc2ba9f 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -17,6 +17,7 @@ import ( "golang.org/x/sync/errgroup" "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/service/cloudwatch" "github.com/aws/aws-sdk-go/service/ec2/ec2iface" @@ -100,7 +101,10 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo query, err := parseQuery(queryContext.Queries[i].Model) if err != nil { - return nil, err + result.Results[query.RefId] = &tsdb.QueryResult{ + Error: err, + } + return result, nil } query.RefId = queryContext.Queries[i].RefId @@ -113,15 +117,21 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo } if query.Id == "" && query.Expression != "" { - return nil, fmt.Errorf("Invalid query: id should be set if using expression") + result.Results[query.RefId] = &tsdb.QueryResult{ + Error: fmt.Errorf("Invalid query: id should be set if using expression"), + } + return result, nil } eg.Go(func() error { queryRes, err := e.executeQuery(ectx, query, queryContext) - if err != nil { + if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" { return err } result.Results[queryRes.RefId] = queryRes + if err != nil { 
+ result.Results[queryRes.RefId].Error = err + } return nil }) } @@ -131,11 +141,14 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo q := getMetricDataQuery eg.Go(func() error { queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, queryContext) - if err != nil { + if ae, ok := err.(awserr.Error); ok && ae.Code() == "500" { return err } for _, queryRes := range queryResponses { result.Results[queryRes.RefId] = queryRes + if err != nil { + result.Results[queryRes.RefId].Error = err + } } return nil }) From 59c17053990203e6f303b5dfbdb3aa4b20611e75 Mon Sep 17 00:00:00 2001 From: Leonard Gram Date: Tue, 24 Jul 2018 10:34:11 +0200 Subject: [PATCH 171/263] docs: mentation that config changes requires restart. --- docs/sources/installation/configuration.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index e3db7a1d60b3..2a799b044b30 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -15,6 +15,8 @@ weight = 1 The Grafana back-end has a number of configuration options that can be specified in a `.ini` configuration file or specified using environment variables. +> **Note.** Grafana needs to be restarted for any configuration changes to take effect. + ## Comments In .ini Files Semicolons (the `;` char) are the standard way to comment out lines in a `.ini` file. 
From 93e73919e814b6d583aa1f3666c22cf922faaa55 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 11:03:46 +0200 Subject: [PATCH 172/263] fix code style --- pkg/tsdb/postgres/postgres.go | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pkg/tsdb/postgres/postgres.go b/pkg/tsdb/postgres/postgres.go index 5ca333fe6335..f19e4fb54f4e 100644 --- a/pkg/tsdb/postgres/postgres.go +++ b/pkg/tsdb/postgres/postgres.go @@ -53,10 +53,12 @@ func generateConnectionString(datasource *models.DataSource) string { } sslmode := datasource.JsonData.Get("sslmode").MustString("verify-full") - u := &url.URL{Scheme: "postgres", - User: url.UserPassword(datasource.User, password), - Host: datasource.Url, Path: datasource.Database, - RawQuery: "sslmode=" + url.QueryEscape(sslmode)} + u := &url.URL{ + Scheme: "postgres", + User: url.UserPassword(datasource.User, password), + Host: datasource.Url, Path: datasource.Database, + RawQuery: "sslmode=" + url.QueryEscape(sslmode), + } return u.String() } From 35efb7c225ae35758ab1826e7ad0012f5ddf46a8 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 11:26:09 +0200 Subject: [PATCH 173/263] changelog: add notes about closing #12644 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 58570c89c186..160aab9b91a3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) 
[#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) +* **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan) * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) From 81c32780b905fa92ab874e4fac86395f0155f14a Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 11:27:53 +0200 Subject: [PATCH 174/263] Pass more tests --- .../plugins/datasource/influxdb/query_ctrl.ts | 1 - .../influxdb/specs/query_ctrl.jest.ts | 110 ++++++++++-------- 2 files changed, 60 insertions(+), 51 deletions(-) diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts index ce669c9f4589..2be1ecc7bff1 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -22,7 +22,6 @@ export class InfluxQueryCtrl extends QueryCtrl { /** @ngInject **/ constructor($scope, $injector, private templateSrv, private $q, private uiSegmentSrv) { super($scope, $injector); - this.target = this.target; this.queryModel = new InfluxQuery(this.target, templateSrv, this.panel.scopedVars); this.queryBuilder = new InfluxQueryBuilder(this.target, this.datasource.database); diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index c3b8d3ae20d6..139efbc3afab 100644 --- 
a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -4,29 +4,28 @@ import 'app/core/services/segment_srv'; // import helpers from 'test/specs/helpers'; import { InfluxQueryCtrl } from '../query_ctrl'; -describe('InfluxDBQueryCtrl', function() { +describe('InfluxDBQueryCtrl', () => { let uiSegmentSrv = { newPlusButton: () => {}, - }; - - let ctx = { - dataSource: { - metricFindQuery: jest.fn(() => Promise.resolve([])), + newKey: key => key, + newKeyValue: key => key, + newSegment: seg => seg, + newSelectMeasurement: () => { + return { value: 'select measurement' }; }, + newOperator: op => op, + newFake: () => {}, }; - InfluxQueryCtrl.prototype.panelCtrl = { - target: { target: {} }, - panel: { - targets: [this.target], - }, + let ctx = { + dataSource: {}, }; // beforeEach(angularMocks.module('grafana.core')); // beforeEach(angularMocks.module('grafana.controllers')); // beforeEach(angularMocks.module('grafana.services')); // beforeEach( - // angularMocks.module(function($compileProvider) { + // angularMocks.module(($ =>compileProvider) { // $compileProvider.preAssignBindingsEnabled(true); // }) // ); @@ -56,147 +55,158 @@ describe('InfluxDBQueryCtrl', function() { // }) // ); - beforeEach(() => { - ctx.ctrl = new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); + beforeEach(async () => { + InfluxQueryCtrl.prototype.datasource = { + metricFindQuery: jest.fn(() => Promise.resolve([])), + }; + InfluxQueryCtrl.prototype.panelCtrl = { + panel: { + targets: [InfluxQueryCtrl.target], + }, + }; + + InfluxQueryCtrl.prototype.target = { target: {} }; + console.log('creating new instance'); + ctx.ctrl = await new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); }); - describe('init', function() { - it('should init tagSegments', function() { + describe('init', () => { + it('should init tagSegments', () => { expect(ctx.ctrl.tagSegments.length).toBe(1); }); - it('should init measurementSegment', 
function() { + it('should init measurementSegment', () => { expect(ctx.ctrl.measurementSegment.value).toBe('select measurement'); }); }); - describe('when first tag segment is updated', function() { - beforeEach(function() { + describe('when first tag segment is updated', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); }); - it('should update tag key', function() { + it('should update tag key', () => { expect(ctx.ctrl.target.tags[0].key).toBe('asd'); expect(ctx.ctrl.tagSegments[0].type).toBe('key'); }); - it('should add tagSegments', function() { + it('should add tagSegments', () => { expect(ctx.ctrl.tagSegments.length).toBe(3); }); }); - describe('when last tag value segment is updated', function() { - beforeEach(function() { + describe('when last tag value segment is updated', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); }); - it('should update tag value', function() { + it('should update tag value', () => { expect(ctx.ctrl.target.tags[0].value).toBe('server1'); }); - it('should set tag operator', function() { + it('should set tag operator', () => { expect(ctx.ctrl.target.tags[0].operator).toBe('='); }); - it('should add plus button for another filter', function() { + it('should add plus button for another filter', () => { expect(ctx.ctrl.tagSegments[3].fake).toBe(true); }); }); - describe('when last tag value segment is updated to regex', function() { - beforeEach(function() { + describe('when last tag value segment is updated to regex', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); }); - it('should update operator', function() { + it('should update operator', () => { expect(ctx.ctrl.tagSegments[1].value).toBe('=~'); expect(ctx.ctrl.target.tags[0].operator).toBe('=~'); 
}); }); - describe('when second tag key is added', function() { - beforeEach(function() { + describe('when second tag key is added', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); }); - it('should update tag key', function() { + it('should update tag key', () => { expect(ctx.ctrl.target.tags[1].key).toBe('key2'); }); - it('should add AND segment', function() { + it('should add AND segment', () => { expect(ctx.ctrl.tagSegments[3].value).toBe('AND'); }); }); - describe('when condition is changed', function() { - beforeEach(function() { + describe('when condition is changed', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); }); - it('should update tag condition', function() { + it('should update tag condition', () => { expect(ctx.ctrl.target.tags[1].condition).toBe('OR'); }); - it('should update AND segment', function() { + it('should update AND segment', () => { expect(ctx.ctrl.tagSegments[3].value).toBe('OR'); expect(ctx.ctrl.tagSegments.length).toBe(7); }); }); - describe('when deleting first tag filter after value is selected', function() { - beforeEach(function() { + describe('when deleting first tag filter after value is selected', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); }); - it('should remove tags', function() { + it('should remove tags', () => { expect(ctx.ctrl.target.tags.length).toBe(0); }); - it('should remove all segment after 2 
and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(1); expect(ctx.ctrl.tagSegments[0].type).toBe('plus-button'); }); }); - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); }); - it('should remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(4); expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); }); }); - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); }); - it('should remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(4); expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); }); }); - describe('when deleting second tag value after second tag filter is complete', function() { - beforeEach(function() { + describe('when deleting second tag value after second tag filter is complete', () 
=> { + beforeEach(() => { ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); @@ -204,7 +214,7 @@ describe('InfluxDBQueryCtrl', function() { ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); }); - it('should remove all segment after 2 and replace with plus button', function() { + it('should remove all segment after 2 and replace with plus button', () => { expect(ctx.ctrl.tagSegments.length).toBe(4); expect(ctx.ctrl.tagSegments[3].type).toBe('plus-button'); }); From 987a16086bbafeccf3c07a5099e5b3ddf914102b Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 14:34:37 +0200 Subject: [PATCH 175/263] Karma to Jest --- .../influxdb/specs/query_ctrl.jest.ts | 72 ++++--------------- 1 file changed, 15 insertions(+), 57 deletions(-) diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index 139efbc3afab..6b929432dfa8 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -1,73 +1,31 @@ import '../query_ctrl'; -import 'app/core/services/segment_srv'; +import { uiSegmentSrv } from 'app/core/services/segment_srv'; // import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; // import helpers from 'test/specs/helpers'; import { InfluxQueryCtrl } from '../query_ctrl'; describe('InfluxDBQueryCtrl', () => { - let uiSegmentSrv = { - newPlusButton: () => {}, - newKey: key => key, - newKeyValue: key => key, - newSegment: seg => seg, - newSelectMeasurement: () => { - return { value: 'select measurement' }; - }, - newOperator: op => op, - newFake: () => {}, - }; - - let ctx = { - dataSource: {}, - }; - - // beforeEach(angularMocks.module('grafana.core')); - // 
beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(($ =>compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - // beforeEach(ctx.providePhase()); - - // beforeEach( - // angularMocks.inject(($rootScope, $controller, $q) => { - // ctx.$q = $q; - // ctx.scope = $rootScope.$new(); - // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - // ctx.target = { target: {} }; - // ctx.panelCtrl = { - // panel: { - // targets: [ctx.target], - // }, - // }; - // ctx.panelCtrl.refresh = sinon.spy(); - // ctx.ctrl = $controller( - // InfluxQueryCtrl, - // { $scope: ctx.scope }, - // { - // panelCtrl: ctx.panelCtrl, - // target: ctx.target, - // datasource: ctx.datasource, - // } - // ); - // }) - // ); - - beforeEach(async () => { + let ctx = {}; + + beforeEach(() => { InfluxQueryCtrl.prototype.datasource = { - metricFindQuery: jest.fn(() => Promise.resolve([])), + metricFindQuery: () => Promise.resolve([]), }; + InfluxQueryCtrl.prototype.target = { target: {} }; InfluxQueryCtrl.prototype.panelCtrl = { panel: { - targets: [InfluxQueryCtrl.target], + targets: [InfluxQueryCtrl.prototype.target], }, + refresh: () => {}, }; - InfluxQueryCtrl.prototype.target = { target: {} }; - console.log('creating new instance'); - ctx.ctrl = await new InfluxQueryCtrl({}, {}, {}, {}, uiSegmentSrv); + ctx.ctrl = new InfluxQueryCtrl( + {}, + {}, + {}, + {}, + new uiSegmentSrv({ trustAsHtml: html => html }, { highlightVariablesAsHtml: () => {} }) + ); }); describe('init', () => { From 48ae9ec77ebbc5e3b1546a795af1f8fded555ff4 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 14:35:37 +0200 Subject: [PATCH 176/263] Remove comments and Karm test --- .../influxdb/specs/query_ctrl.jest.ts | 2 - .../influxdb/specs/query_ctrl_specs.ts | 193 ------------------ 2 files changed, 195 deletions(-) delete mode 100644 
public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index 6b929432dfa8..4e3fc47a5fde 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -1,7 +1,5 @@ import '../query_ctrl'; import { uiSegmentSrv } from 'app/core/services/segment_srv'; -// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -// import helpers from 'test/specs/helpers'; import { InfluxQueryCtrl } from '../query_ctrl'; describe('InfluxDBQueryCtrl', () => { diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts deleted file mode 100644 index 4daa48d6b9d3..000000000000 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts +++ /dev/null @@ -1,193 +0,0 @@ -import '../query_ctrl'; -import 'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; -import { InfluxQueryCtrl } from '../query_ctrl'; - -describe('InfluxDBQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - beforeEach(ctx.providePhase()); - - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - ctx.target = { target: {} }; - ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - 
ctx.panelCtrl.refresh = sinon.spy(); - ctx.ctrl = $controller( - InfluxQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - target: ctx.target, - datasource: ctx.datasource, - } - ); - }) - ); - - describe('init', function() { - it('should init tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - }); - - it('should init measurementSegment', function() { - expect(ctx.ctrl.measurementSegment.value).to.be('select measurement'); - }); - }); - - describe('when first tag segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[0].key).to.be('asd'); - expect(ctx.ctrl.tagSegments[0].type).to.be('key'); - }); - - it('should add tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(3); - }); - }); - - describe('when last tag value segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - }); - - it('should update tag value', function() { - expect(ctx.ctrl.target.tags[0].value).to.be('server1'); - }); - - it('should set tag operator', function() { - expect(ctx.ctrl.target.tags[0].operator).to.be('='); - }); - - it('should add plus button for another filter', function() { - expect(ctx.ctrl.tagSegments[3].fake).to.be(true); - }); - }); - - describe('when last tag value segment is updated to regex', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); - }); - - it('should update operator', function() { - expect(ctx.ctrl.tagSegments[1].value).to.be('=~'); - expect(ctx.ctrl.target.tags[0].operator).to.be('=~'); - }); - }); - - describe('when second tag key is added', function() { - 
beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[1].key).to.be('key2'); - }); - - it('should add AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('AND'); - }); - }); - - describe('when condition is changed', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); - }); - - it('should update tag condition', function() { - expect(ctx.ctrl.target.tags[1].condition).to.be('OR'); - }); - - it('should update AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('OR'); - expect(ctx.ctrl.tagSegments.length).to.be(7); - }); - }); - - describe('when deleting first tag filter after value is selected', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); - }); - - it('should remove tags', function() { - expect(ctx.ctrl.target.tags.length).to.be(0); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); 
- ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value after second tag filter is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); -}); From c0f9c06f2163dc57424257b204e6c6c449aa0212 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 29 Jun 2018 13:37:21 +0200 Subject: [PATCH 177/263] Karma to Jest: completer --- .../{completer_specs.ts => completer.jest.ts} | 70 +++++++++---------- 1 file changed, 34 insertions(+), 36 deletions(-) rename 
public/app/plugins/datasource/prometheus/specs/{completer_specs.ts => completer.jest.ts} (79%) diff --git a/public/app/plugins/datasource/prometheus/specs/completer_specs.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts similarity index 79% rename from public/app/plugins/datasource/prometheus/specs/completer_specs.ts rename to public/app/plugins/datasource/prometheus/specs/completer.jest.ts index 846948340898..cb8dd8e5bd69 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer_specs.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -1,47 +1,45 @@ -import { describe, it, sinon, expect } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; +//import { describe, it, sinon, expect } from 'test/lib/common'; +//import helpers from 'test/specs/helpers'; import { PromCompleter } from '../completer'; import { PrometheusDatasource } from '../datasource'; +import { BackendSrv } from 'app/core/services/backend_srv'; +jest.mock('../datasource'); +jest.mock('app/core/services/backend_srv'); describe('Prometheus editor completer', function() { - var ctx = new helpers.ServiceTestContext(); - beforeEach(ctx.providePhase(['templateSrv'])); + //beforeEach(ctx.providePhase(['templateSrv'])); function getSessionStub(data) { return { - getTokenAt: sinon.stub().returns(data.currentToken), - getTokens: sinon.stub().returns(data.tokens), - getLine: sinon.stub().returns(data.line), + getTokenAt:jest.fn(()=> (data.currentToken)), + getTokens:jest.fn(()=> (data.tokens)), + getLine:jest.fn(()=> (data.line)), }; } let editor = {}; - let datasourceStub = { - performInstantQuery: sinon - .stub() - .withArgs({ expr: '{__name__="node_cpu"' }) - .returns( - Promise.resolve({ - data: { + + let backendSrv = {} + let datasourceStub = new PrometheusDatasource({},{},backendSrv,{},{}); + + datasourceStub.performInstantQuery = jest.fn(() => Promise.resolve({ data: { - result: [ - { - metric: { - job: 'node', - instance: 
'localhost:9100', + data: { + result: [ + { + metric: { + job: 'node', + instance: 'localhost:9100', + }, }, - }, - ], + ], + }, }, - }, - }) - ), - performSuggestQuery: sinon - .stub() - .withArgs('node', true) - .returns(Promise.resolve(['node_cpu'])), - }; + }) + ); + datasourceStub.performSuggestQuery = jest.fn(() => Promise.resolve(['node_cpu'])); + let templateSrv = { variables: [ @@ -62,9 +60,9 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 10 }, '[', (s, res) => { - expect(res[0].caption).to.eql('$__interval'); - expect(res[0].value).to.eql('[$__interval'); - expect(res[0].meta).to.eql('range vector'); + expect(res[0].caption).toEqual('$__interval'); + expect(res[0].value).toEqual('[$__interval'); + expect(res[0].meta).toEqual('range vector'); }); }); }); @@ -93,7 +91,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 10 }, 'j', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); @@ -125,7 +123,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 23 }, 'j', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); @@ -156,7 +154,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 15 }, 'n', (s, res) => { - expect(res[0].meta).to.eql('label value'); + expect(res[0].meta).toEqual('label value'); }); }); }); @@ -192,7 +190,7 @@ describe('Prometheus editor completer', function() { }); return completer.getCompletions(editor, session, { row: 0, column: 23 }, 'm', (s, res) => { - expect(res[0].meta).to.eql('label name'); + expect(res[0].meta).toEqual('label name'); }); }); }); From 49a8c2e0c138118f4e1bc3bfa37446eba596b98c Mon Sep 17 00:00:00 
2001 From: Tobias Skarhed Date: Fri, 29 Jun 2018 13:44:11 +0200 Subject: [PATCH 178/263] Make beautiful --- .../prometheus/specs/completer.jest.ts | 40 +++++++++---------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts index cb8dd8e5bd69..b401cb9bf657 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -12,35 +12,35 @@ describe('Prometheus editor completer', function() { function getSessionStub(data) { return { - getTokenAt:jest.fn(()=> (data.currentToken)), - getTokens:jest.fn(()=> (data.tokens)), - getLine:jest.fn(()=> (data.line)), + getTokenAt: jest.fn(() => data.currentToken), + getTokens: jest.fn(() => data.tokens), + getLine: jest.fn(() => data.line), }; } let editor = {}; - let backendSrv = {} - let datasourceStub = new PrometheusDatasource({},{},backendSrv,{},{}); - - datasourceStub.performInstantQuery = jest.fn(() => Promise.resolve({ - data: { - data: { - result: [ - { - metric: { - job: 'node', - instance: 'localhost:9100', - }, - }, - ], + let backendSrv = {}; + let datasourceStub = new PrometheusDatasource({}, {}, backendSrv, {}, {}); + + datasourceStub.performInstantQuery = jest.fn(() => + Promise.resolve({ + data: { + data: { + result: [ + { + metric: { + job: 'node', + instance: 'localhost:9100', }, }, - }) - ); + ], + }, + }, + }) + ); datasourceStub.performSuggestQuery = jest.fn(() => Promise.resolve(['node_cpu'])); - let templateSrv = { variables: [ { From d2f81d52d4b121cbc0bc6c39527900a4c5cf2042 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 2 Jul 2018 09:43:34 +0200 Subject: [PATCH 179/263] Karma to Jest: begin influx query_ctrl --- .../influxdb/specs/query_ctrl.jest.ts | 222 ++++++++++++++++++ .../influxdb/specs/query_ctrl_specs.ts | 193 --------------- 2 files changed, 222 insertions(+), 
193 deletions(-) create mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts delete mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts new file mode 100644 index 000000000000..dd6c9b4fa189 --- /dev/null +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -0,0 +1,222 @@ +import '../query_ctrl'; +import 'app/core/services/segment_srv'; +import { uiSegmentSrv } from 'app/core/services/segment_srv'; +//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +//import helpers from 'test/specs/helpers'; +import { InfluxQueryCtrl } from '../query_ctrl'; + +describe('InfluxDBQueryCtrl', () => { + //var ctx = new helpers.ControllerTestContext(); + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(($ =>compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + // beforeEach(ctx.providePhase()); + + // beforeEach( + // angularMocks.inject(($rootScope, $controller, $q) => { + // ctx.$q = $q; + // ctx.scope = $rootScope.$new(); + // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + // ctx.target = { target: {} }; + // ctx.panelCtrl = { + // panel: { + // targets: [ctx.target], + // }, + // }; + // ctx.panelCtrl.refresh = sinon.spy(); + // influxQueryCtrl = $controller( + // InfluxQueryCtrl, + // { $scope: ctx.scope }, + // { + // panelCtrl: ctx.panelCtrl, + // target: ctx.target, + // datasource: ctx.datasource, + // } + // ); + // }) + // ); + + InfluxQueryCtrl.prototype.target = { target: {} }; + InfluxQueryCtrl.prototype.panelCtrl = { + refresh: jest.fn(), + panel: { + targets: InfluxQueryCtrl.prototype.target, + }, 
+ }; + InfluxQueryCtrl.prototype.datasource = { + metricFindQuery: jest.fn(() => Promise.resolve([])), + }; + + // let uiSegmentSrv = { + // newPlusButton: jest.fn(), + // newSegment: jest.fn(), + // newSelectMeasurement: jest.fn() + // }; + let influxQueryCtrl; + + beforeEach(() => { + influxQueryCtrl = new InfluxQueryCtrl( + {}, + {}, + {}, + {}, + new uiSegmentSrv({ trustAsHtml: jest.fn() }, { highlightVariablesAsHtml: jest.fn() }) + ); + }); + describe('init', () => { + it('should init tagSegments', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(1); + }); + + it('should init measurementSegment', () => { + expect(influxQueryCtrl.measurementSegment.value).toBe('select measurement'); + }); + }); + + describe('when first tag segment is updated', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + }); + + it('should update tag key', () => { + expect(influxQueryCtrl.target.tags[0].key).toBe('asd'); + expect(influxQueryCtrl.tagSegments[0].type).toBe('key'); + }); + + it('should add tagSegments', () => { + console.log(influxQueryCtrl.tagSegments); + expect(influxQueryCtrl.tagSegments.length).toBe(3); + }); + }); + + describe('when last tag value segment is updated', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + }); + + it('should update tag value', () => { + expect(influxQueryCtrl.target.tags[0].value).toBe('server1'); + }); + + it('should set tag operator', () => { + expect(influxQueryCtrl.target.tags[0].operator).toBe('='); + }); + + it('should add plus button for another filter', () => { + expect(influxQueryCtrl.tagSegments[3].fake).toBe(true); + }); + }); + + describe('when last tag value segment is updated to regex', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + 
influxQueryCtrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); + }); + + it('should update operator', () => { + expect(influxQueryCtrl.tagSegments[1].value).toBe('=~'); + expect(influxQueryCtrl.target.tags[0].operator).toBe('=~'); + }); + }); + + describe('when second tag key is added', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + }); + + it('should update tag key', () => { + expect(influxQueryCtrl.target.tags[1].key).toBe('key2'); + }); + + it('should add AND segment', () => { + expect(influxQueryCtrl.tagSegments[3].value).toBe('AND'); + }); + }); + + describe('when condition is changed', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); + }); + + it('should update tag condition', () => { + expect(influxQueryCtrl.target.tags[1].condition).toBe('OR'); + }); + + it('should update AND segment', () => { + expect(influxQueryCtrl.tagSegments[3].value).toBe('OR'); + expect(influxQueryCtrl.tagSegments.length).toBe(7); + }); + }); + + describe('when deleting first tag filter after value is selected', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 0); + }); + + it('should remove tags', () => { + expect(influxQueryCtrl.target.tags.length).toBe(0); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + 
expect(influxQueryCtrl.tagSegments.length).toBe(1); + expect(influxQueryCtrl.tagSegments[0].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(4); + expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(4); + expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); + }); + }); + + describe('when deleting second tag value after second tag filter is complete', () => { + beforeEach(() => { + influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + influxQueryCtrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); + influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus 
button', () => { + expect(influxQueryCtrl.tagSegments.length).toBe(4); + expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); + }); + }); +}); diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts deleted file mode 100644 index 4daa48d6b9d3..000000000000 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts +++ /dev/null @@ -1,193 +0,0 @@ -import '../query_ctrl'; -import 'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; -import { InfluxQueryCtrl } from '../query_ctrl'; - -describe('InfluxDBQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - beforeEach(ctx.providePhase()); - - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - ctx.target = { target: {} }; - ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - ctx.panelCtrl.refresh = sinon.spy(); - ctx.ctrl = $controller( - InfluxQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - target: ctx.target, - datasource: ctx.datasource, - } - ); - }) - ); - - describe('init', function() { - it('should init tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - }); - - it('should init measurementSegment', function() { - expect(ctx.ctrl.measurementSegment.value).to.be('select measurement'); - }); - }); - - describe('when first tag segment is updated', function() { - beforeEach(function() { - 
ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[0].key).to.be('asd'); - expect(ctx.ctrl.tagSegments[0].type).to.be('key'); - }); - - it('should add tagSegments', function() { - expect(ctx.ctrl.tagSegments.length).to.be(3); - }); - }); - - describe('when last tag value segment is updated', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - }); - - it('should update tag value', function() { - expect(ctx.ctrl.target.tags[0].value).to.be('server1'); - }); - - it('should set tag operator', function() { - expect(ctx.ctrl.target.tags[0].operator).to.be('='); - }); - - it('should add plus button for another filter', function() { - expect(ctx.ctrl.tagSegments[3].fake).to.be(true); - }); - }); - - describe('when last tag value segment is updated to regex', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); - }); - - it('should update operator', function() { - expect(ctx.ctrl.tagSegments[1].value).to.be('=~'); - expect(ctx.ctrl.target.tags[0].operator).to.be('=~'); - }); - }); - - describe('when second tag key is added', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - }); - - it('should update tag key', function() { - expect(ctx.ctrl.target.tags[1].key).to.be('key2'); - }); - - it('should add AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('AND'); - }); - }); - - describe('when condition is changed', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', 
type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); - }); - - it('should update tag condition', function() { - expect(ctx.ctrl.target.tags[1].condition).to.be('OR'); - }); - - it('should update AND segment', function() { - expect(ctx.ctrl.tagSegments[3].value).to.be('OR'); - expect(ctx.ctrl.tagSegments.length).to.be(7); - }); - }); - - describe('when deleting first tag filter after value is selected', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); - }); - - it('should remove tags', function() { - expect(ctx.ctrl.target.tags.length).to.be(0); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(1); - expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 
'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); - - describe('when deleting second tag value after second tag filter is complete', function() { - beforeEach(function() { - ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); - ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', function() { - expect(ctx.ctrl.tagSegments.length).to.be(4); - expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); - }); - }); -}); From d6381bed7cebe7c0270bf0ddacc8333e17fb9658 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Mon, 2 Jul 2018 14:34:58 +0200 Subject: [PATCH 180/263] Test fail depending on test order --- .../plugins/datasource/influxdb/query_ctrl.ts | 2 +- .../influxdb/specs/query_ctrl.jest.ts | 4 +- .../influxdb/specs/query_ctrl_specs.ts | 195 ++++++++++++++++++ 3 files changed, 198 insertions(+), 3 deletions(-) create mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts index ce669c9f4589..174497111433 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -338,7 +338,7 @@ export class InfluxQueryCtrl extends QueryCtrl { this.tagSegments.push(this.uiSegmentSrv.newPlusButton()); } } - + 
console.log(this.tagSegments); this.rebuildTargetTagConditions(); } diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts index dd6c9b4fa189..0c1ed3ed6b20 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts @@ -46,7 +46,7 @@ describe('InfluxDBQueryCtrl', () => { InfluxQueryCtrl.prototype.panelCtrl = { refresh: jest.fn(), panel: { - targets: InfluxQueryCtrl.prototype.target, + targets: [InfluxQueryCtrl.prototype.target], }, }; InfluxQueryCtrl.prototype.datasource = { @@ -69,6 +69,7 @@ describe('InfluxDBQueryCtrl', () => { new uiSegmentSrv({ trustAsHtml: jest.fn() }, { highlightVariablesAsHtml: jest.fn() }) ); }); + describe('init', () => { it('should init tagSegments', () => { expect(influxQueryCtrl.tagSegments.length).toBe(1); @@ -90,7 +91,6 @@ describe('InfluxDBQueryCtrl', () => { }); it('should add tagSegments', () => { - console.log(influxQueryCtrl.tagSegments); expect(influxQueryCtrl.tagSegments.length).toBe(3); }); }); diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts new file mode 100644 index 000000000000..151dd7ab0c6d --- /dev/null +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts @@ -0,0 +1,195 @@ +import '../query_ctrl'; +import 'app/core/services/segment_srv'; +import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +import helpers from 'test/specs/helpers'; +import { InfluxQueryCtrl } from '../query_ctrl'; + +describe('InfluxDBQueryCtrl', function() { + var ctx = new helpers.ControllerTestContext(); + + beforeEach(angularMocks.module('grafana.core')); + beforeEach(angularMocks.module('grafana.controllers')); + beforeEach(angularMocks.module('grafana.services')); + beforeEach( + angularMocks.module(function($compileProvider) { 
+ $compileProvider.preAssignBindingsEnabled(true); + }) + ); + beforeEach(ctx.providePhase()); + + beforeEach( + angularMocks.inject(($rootScope, $controller, $q) => { + ctx.$q = $q; + ctx.scope = $rootScope.$new(); + ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + ctx.target = { target: {} }; + ctx.panelCtrl = { + panel: { + targets: [ctx.target], + }, + }; + ctx.panelCtrl.refresh = sinon.spy(); + ctx.ctrl = $controller( + InfluxQueryCtrl, + { $scope: ctx.scope }, + { + panelCtrl: ctx.panelCtrl, + target: ctx.target, + datasource: ctx.datasource, + } + ); + }) + ); + + describe('init', function() { + it('should init tagSegments', function() { + expect(ctx.ctrl.tagSegments.length).to.be(1); + }); + + it('should init measurementSegment', function() { + expect(ctx.ctrl.measurementSegment.value).to.be('select measurement'); + }); + }); + + describe('when first tag segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + }); + + it('should update tag key', function() { + console.log(ctx.ctrl.target.tags); + expect(ctx.ctrl.target.tags[0].key).to.be('asd'); + expect(ctx.ctrl.tagSegments[0].type).to.be('key'); + }); + + it('should add tagSegments', function() { + console.log(ctx.ctrl.tagSegments); + expect(ctx.ctrl.tagSegments.length).to.be(3); + }); + }); + + describe('when last tag value segment is updated', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + }); + + it('should update tag value', function() { + expect(ctx.ctrl.target.tags[0].value).to.be('server1'); + }); + + it('should set tag operator', function() { + expect(ctx.ctrl.target.tags[0].operator).to.be('='); + }); + + it('should add plus button for another filter', function() { + expect(ctx.ctrl.tagSegments[3].fake).to.be(true); + }); + }); + + describe('when last tag value 
segment is updated to regex', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); + }); + + it('should update operator', function() { + expect(ctx.ctrl.tagSegments[1].value).to.be('=~'); + expect(ctx.ctrl.target.tags[0].operator).to.be('=~'); + }); + }); + + describe('when second tag key is added', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + }); + + it('should update tag key', function() { + expect(ctx.ctrl.target.tags[1].key).to.be('key2'); + }); + + it('should add AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).to.be('AND'); + }); + }); + + describe('when condition is changed', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); + }); + + it('should update tag condition', function() { + expect(ctx.ctrl.target.tags[1].condition).to.be('OR'); + }); + + it('should update AND segment', function() { + expect(ctx.ctrl.tagSegments[3].value).to.be('OR'); + expect(ctx.ctrl.tagSegments.length).to.be(7); + }); + }); + + describe('when deleting first tag filter after value is selected', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 0); + }); + + it('should remove tags', function() { + expect(ctx.ctrl.target.tags.length).to.be(0); + }); + + it('should remove 
all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).to.be(1); + expect(ctx.ctrl.tagSegments[0].type).to.be('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).to.be(4); + expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); + }); + }); + + describe('when deleting second tag value before second tag value is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + expect(ctx.ctrl.tagSegments.length).to.be(4); + expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); + }); + }); + + describe('when deleting second tag value after second tag filter is complete', function() { + beforeEach(function() { + ctx.ctrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); + ctx.ctrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); + ctx.ctrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); + ctx.ctrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); + ctx.ctrl.tagSegmentUpdated(ctx.ctrl.removeTagFilterSegment, 4); + }); + + it('should remove all segment after 2 and replace with plus button', function() { + 
expect(ctx.ctrl.tagSegments.length).to.be(4); + expect(ctx.ctrl.tagSegments[3].type).to.be('plus-button'); + }); + }); +}); From 51caf470f50c07fdb7f6d47d7fe022f2ebfc1ac5 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 14:55:54 +0200 Subject: [PATCH 181/263] Remove influx qeury_ctrl jest, as it is already completed --- .../influxdb/specs/query_ctrl.jest.ts | 222 ------------------ .../prometheus/specs/completer.jest.ts | 3 - 2 files changed, 225 deletions(-) delete mode 100644 public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts deleted file mode 100644 index 0c1ed3ed6b20..000000000000 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl.jest.ts +++ /dev/null @@ -1,222 +0,0 @@ -import '../query_ctrl'; -import 'app/core/services/segment_srv'; -import { uiSegmentSrv } from 'app/core/services/segment_srv'; -//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; -//import helpers from 'test/specs/helpers'; -import { InfluxQueryCtrl } from '../query_ctrl'; - -describe('InfluxDBQueryCtrl', () => { - //var ctx = new helpers.ControllerTestContext(); - - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(($ =>compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - // beforeEach(ctx.providePhase()); - - // beforeEach( - // angularMocks.inject(($rootScope, $controller, $q) => { - // ctx.$q = $q; - // ctx.scope = $rootScope.$new(); - // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - // ctx.target = { target: {} }; - // ctx.panelCtrl = { - // panel: { - // targets: [ctx.target], - // }, - // }; - // ctx.panelCtrl.refresh = sinon.spy(); - // 
influxQueryCtrl = $controller( - // InfluxQueryCtrl, - // { $scope: ctx.scope }, - // { - // panelCtrl: ctx.panelCtrl, - // target: ctx.target, - // datasource: ctx.datasource, - // } - // ); - // }) - // ); - - InfluxQueryCtrl.prototype.target = { target: {} }; - InfluxQueryCtrl.prototype.panelCtrl = { - refresh: jest.fn(), - panel: { - targets: [InfluxQueryCtrl.prototype.target], - }, - }; - InfluxQueryCtrl.prototype.datasource = { - metricFindQuery: jest.fn(() => Promise.resolve([])), - }; - - // let uiSegmentSrv = { - // newPlusButton: jest.fn(), - // newSegment: jest.fn(), - // newSelectMeasurement: jest.fn() - // }; - let influxQueryCtrl; - - beforeEach(() => { - influxQueryCtrl = new InfluxQueryCtrl( - {}, - {}, - {}, - {}, - new uiSegmentSrv({ trustAsHtml: jest.fn() }, { highlightVariablesAsHtml: jest.fn() }) - ); - }); - - describe('init', () => { - it('should init tagSegments', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(1); - }); - - it('should init measurementSegment', () => { - expect(influxQueryCtrl.measurementSegment.value).toBe('select measurement'); - }); - }); - - describe('when first tag segment is updated', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - }); - - it('should update tag key', () => { - expect(influxQueryCtrl.target.tags[0].key).toBe('asd'); - expect(influxQueryCtrl.tagSegments[0].type).toBe('key'); - }); - - it('should add tagSegments', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(3); - }); - }); - - describe('when last tag value segment is updated', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - }); - - it('should update tag value', () => { - expect(influxQueryCtrl.target.tags[0].value).toBe('server1'); - }); - - it('should set tag operator', () => { - 
expect(influxQueryCtrl.target.tags[0].operator).toBe('='); - }); - - it('should add plus button for another filter', () => { - expect(influxQueryCtrl.tagSegments[3].fake).toBe(true); - }); - }); - - describe('when last tag value segment is updated to regex', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: '/server.*/', type: 'value' }, 2); - }); - - it('should update operator', () => { - expect(influxQueryCtrl.tagSegments[1].value).toBe('=~'); - expect(influxQueryCtrl.target.tags[0].operator).toBe('=~'); - }); - }); - - describe('when second tag key is added', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - }); - - it('should update tag key', () => { - expect(influxQueryCtrl.target.tags[1].key).toBe('key2'); - }); - - it('should add AND segment', () => { - expect(influxQueryCtrl.tagSegments[3].value).toBe('AND'); - }); - }); - - describe('when condition is changed', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated({ value: 'OR', type: 'condition' }, 3); - }); - - it('should update tag condition', () => { - expect(influxQueryCtrl.target.tags[1].condition).toBe('OR'); - }); - - it('should update AND segment', () => { - expect(influxQueryCtrl.tagSegments[3].value).toBe('OR'); - expect(influxQueryCtrl.tagSegments.length).toBe(7); - }); - }); - - describe('when deleting first tag filter after value is selected', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 
'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 0); - }); - - it('should remove tags', () => { - expect(influxQueryCtrl.target.tags.length).toBe(0); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(1); - expect(influxQueryCtrl.tagSegments[0].type).toBe('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(4); - expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); - }); - }); - - describe('when deleting second tag value before second tag value is complete', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(4); - expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); - }); - }); - - describe('when deleting second tag value after second tag filter is complete', () => { - beforeEach(() => { - influxQueryCtrl.tagSegmentUpdated({ value: 'asd', type: 'plus-button' }, 0); - 
influxQueryCtrl.tagSegmentUpdated({ value: 'server1', type: 'value' }, 2); - influxQueryCtrl.tagSegmentUpdated({ value: 'key2', type: 'plus-button' }, 3); - influxQueryCtrl.tagSegmentUpdated({ value: 'value', type: 'value' }, 6); - influxQueryCtrl.tagSegmentUpdated(influxQueryCtrl.removeTagFilterSegment, 4); - }); - - it('should remove all segment after 2 and replace with plus button', () => { - expect(influxQueryCtrl.tagSegments.length).toBe(4); - expect(influxQueryCtrl.tagSegments[3].type).toBe('plus-button'); - }); - }); -}); diff --git a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts index b401cb9bf657..fbe2dce0ce50 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -1,6 +1,3 @@ -//import { describe, it, sinon, expect } from 'test/lib/common'; -//import helpers from 'test/specs/helpers'; - import { PromCompleter } from '../completer'; import { PrometheusDatasource } from '../datasource'; import { BackendSrv } from 'app/core/services/backend_srv'; From b81621b6f5019e12893fdddde32b7850aabbad61 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:24:44 +0200 Subject: [PATCH 182/263] changelog: add notes about closing #12636 #9827 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 160aab9b91a3..4917c5998d05 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -28,6 +28,7 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) +* **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) # 5.2.1 
(2018-06-29) From 3dab4e1b52c1a4e7712abd5c20da14a4736b8ca4 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:27:13 +0200 Subject: [PATCH 183/263] changelog: add notes about closing #12589 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4917c5998d05..826507e1bd65 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ * **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) +* **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) # 5.2.1 (2018-06-29) From 25c8233523d317a378f628258b86d88686b1a744 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 4 Jul 2018 09:22:39 +0200 Subject: [PATCH 184/263] Begin conversion --- ...query_ctrl_specs.ts => query_ctrl.jest.ts} | 95 +++++++++++-------- 1 file changed, 53 insertions(+), 42 deletions(-) rename public/app/plugins/datasource/graphite/specs/{query_ctrl_specs.ts => query_ctrl.jest.ts} (84%) diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts similarity index 84% rename from public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts rename to public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts index b4f7718930f3..776dec0a1a78 100644 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts @@ -6,48 +6,59 @@ import helpers from 'test/specs/helpers'; import { GraphiteQueryCtrl } from '../query_ctrl'; 
describe('GraphiteQueryCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase()); - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }; - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - ctx.datasource.getFuncDefs = sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0'))); - ctx.datasource.getFuncDef = gfunc.getFuncDef; - ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null)); - ctx.datasource.createFuncInstance = gfunc.createFuncInstance; - ctx.panelCtrl = { panel: {} }; - ctx.panelCtrl = { - panel: { - targets: [ctx.target], - }, - }; - ctx.panelCtrl.refresh = sinon.spy(); - - ctx.ctrl = $controller( - GraphiteQueryCtrl, - { $scope: ctx.scope }, - { - panelCtrl: ctx.panelCtrl, - datasource: ctx.datasource, - target: ctx.target, - } - ); - ctx.scope.$digest(); - }) - ); + + let datasource = { + metricFindQuery: jest.fn(() => Promise.resolve([])), + getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))), + getFuncDef: gfunc.getFuncDef, + waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)), + createFuncInstance: gfunc.createFuncInstance, + + }; + let ctx = { + + }; + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + + //beforeEach(ctx.providePhase()); + // 
beforeEach( + // angularMocks.inject(($rootScope, $controller, $q) => { + // ctx.$q = $q; + // ctx.scope = $rootScope.$new(); + // ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }; + // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); + // ctx.datasource.getFuncDefs = sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0'))); + // ctx.datasource.getFuncDef = gfunc.getFuncDef; + // ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null)); + // ctx.datasource.createFuncInstance = gfunc.createFuncInstance; + // ctx.panelCtrl = { panel: {} }; + // ctx.panelCtrl = { + // panel: { + // targets: [ctx.target], + // }, + // }; + // ctx.panelCtrl.refresh = sinon.spy(); + + // ctx.ctrl = $controller( + // GraphiteQueryCtrl, + // { $scope: ctx.scope }, + // { + // panelCtrl: ctx.panelCtrl, + // datasource: ctx.datasource, + // target: ctx.target, + // } + // ); + // ctx.scope.$digest(); + // }) + // ); describe('init', function() { it('should validate metric key exists', function() { From b58a7642dc6b3be313a30be95b455fd6141f8da9 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 15:39:56 +0200 Subject: [PATCH 185/263] Karma to Jest --- .../graphite/specs/query_ctrl.jest.ts | 271 ++++++++++-------- 1 file changed, 145 insertions(+), 126 deletions(-) diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts index 776dec0a1a78..58cefeef6f6a 100644 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts @@ -1,22 +1,27 @@ -import 'app/core/services/segment_srv'; -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; +import { uiSegmentSrv } from 'app/core/services/segment_srv'; +// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; import gfunc from 
'../gfunc'; -import helpers from 'test/specs/helpers'; +// import helpers from 'test/specs/helpers'; import { GraphiteQueryCtrl } from '../query_ctrl'; -describe('GraphiteQueryCtrl', function() { - - let datasource = { - metricFindQuery: jest.fn(() => Promise.resolve([])), - getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))), - getFuncDef: gfunc.getFuncDef, - waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)), - createFuncInstance: gfunc.createFuncInstance, - +describe('GraphiteQueryCtrl', () => { + let ctx = { + datasource: { + metricFindQuery: jest.fn(() => Promise.resolve([])), + getFuncDefs: jest.fn(() => Promise.resolve(gfunc.getFuncDefs('1.0'))), + getFuncDef: gfunc.getFuncDef, + waitForFuncDefsLoaded: jest.fn(() => Promise.resolve(null)), + createFuncInstance: gfunc.createFuncInstance, + }, + target: { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }, + panelCtrl: { + refresh: jest.fn(), + }, }; - let ctx = { + ctx.panelCtrl.panel = { + targets: [ctx.target], }; // beforeEach(angularMocks.module('grafana.core')); @@ -60,156 +65,170 @@ describe('GraphiteQueryCtrl', function() { // }) // ); - describe('init', function() { - it('should validate metric key exists', function() { - expect(ctx.datasource.metricFindQuery.getCall(0).args[0]).to.be('test.prod.*'); + beforeEach(() => { + GraphiteQueryCtrl.prototype.target = ctx.target; + GraphiteQueryCtrl.prototype.datasource = ctx.datasource; + + GraphiteQueryCtrl.prototype.panelCtrl = ctx.panelCtrl; + + ctx.ctrl = new GraphiteQueryCtrl( + {}, + {}, + new uiSegmentSrv({ trustAsHtml: html => html }, { highlightVariablesAsHtml: () => {} }), + {}, + {} + ); + }); + + describe('init', () => { + it('should validate metric key exists', () => { + expect(ctx.datasource.metricFindQuery.mock.calls[0][0]).toBe('test.prod.*'); }); - it('should delete last segment if no metrics are found', function() { - expect(ctx.ctrl.segments[2].value).to.be('select metric'); + it('should delete last segment 
if no metrics are found', () => { + expect(ctx.ctrl.segments[2].value).toBe('select metric'); }); - it('should parse expression and build function model', function() { - expect(ctx.ctrl.queryModel.functions.length).to.be(2); + it('should parse expression and build function model', () => { + expect(ctx.ctrl.queryModel.functions.length).toBe(2); }); }); - describe('when adding function', function() { - beforeEach(function() { + describe('when adding function', () => { + beforeEach(() => { ctx.ctrl.target.target = 'test.prod.*.count'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.addFunction(gfunc.getFuncDef('aliasByNode')); }); - it('should add function with correct node number', function() { - expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(2); + it('should add function with correct node number', () => { + expect(ctx.ctrl.queryModel.functions[0].params[0]).toBe(2); }); - it('should update target', function() { - expect(ctx.ctrl.target.target).to.be('aliasByNode(test.prod.*.count, 2)'); + it('should update target', () => { + expect(ctx.ctrl.target.target).toBe('aliasByNode(test.prod.*.count, 2)'); }); - it('should call refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); + it('should call refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); }); }); - describe('when adding function before any metric segment', function() { - beforeEach(function() { + describe('when adding function before any metric segment', () => { + beforeEach(() => { ctx.ctrl.target.target = ''; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([{ expandable: true }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: true }]); ctx.ctrl.parseTarget(); ctx.ctrl.addFunction(gfunc.getFuncDef('asPercent')); }); - it('should add function and remove 
select metric link', function() { - expect(ctx.ctrl.segments.length).to.be(0); + it('should add function and remove select metric link', () => { + expect(ctx.ctrl.segments.length).toBe(0); }); }); - describe('when initializing target without metric expression and only function', function() { - beforeEach(function() { + describe('when initializing target without metric expression and only function', () => { + beforeEach(() => { ctx.ctrl.target.target = 'asPercent(#A, #B)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); - ctx.scope.$digest(); }); - it('should not add select metric segment', function() { - expect(ctx.ctrl.segments.length).to.be(1); + it('should not add select metric segment', () => { + expect(ctx.ctrl.segments.length).toBe(1); }); - it('should add second series ref as param', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); + it('should add second series ref as param', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); }); }); - describe('when initializing a target with single param func using variable', function() { - beforeEach(function() { + describe('when initializing a target with single param func using variable', () => { + beforeEach(() => { ctx.ctrl.target.target = 'movingAverage(prod.count, $var)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); }); - it('should add 2 segments', function() { - expect(ctx.ctrl.segments.length).to.be(2); + it('should add 2 segments', () => { + expect(ctx.ctrl.segments.length).toBe(2); }); - it('should add function param', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); + it('should add function param', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); }); }); - describe('when initializing 
target without metric expression and function with series-ref', function() { - beforeEach(function() { + describe('when initializing target without metric expression and function with series-ref', () => { + beforeEach(() => { ctx.ctrl.target.target = 'asPercent(metric.node.count, #A)'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); }); - it('should add segments', function() { - expect(ctx.ctrl.segments.length).to.be(3); + it('should add segments', () => { + expect(ctx.ctrl.segments.length).toBe(3); }); - it('should have correct func params', function() { - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); + it('should have correct func params', () => { + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); }); }); - describe('when getting altSegments and metricFindQuery returns empty array', function() { - beforeEach(function() { + describe('when getting altSegments and metricFindQuery returns empty array', () => { + beforeEach(() => { ctx.ctrl.target.target = 'test.count'; - ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([]); ctx.ctrl.parseTarget(); ctx.ctrl.getAltSegments(1).then(function(results) { ctx.altSegments = results; }); - ctx.scope.$digest(); }); - it('should have no segments', function() { - expect(ctx.altSegments.length).to.be(0); + it('should have no segments', () => { + expect(ctx.altSegments.length).toBe(0); }); }); - describe('targetChanged', function() { - beforeEach(function() { - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + describe('targetChanged', () => { + beforeEach(() => { + ctx.ctrl.target.target = 'aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'; + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); 
ctx.ctrl.target.target = ''; ctx.ctrl.targetChanged(); }); - it('should rebuld target after expression model', function() { - expect(ctx.ctrl.target.target).to.be('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'); + it('should rebuild target after expression model', () => { + expect(ctx.ctrl.target.target).toBe('aliasByNode(scaleToSeconds(test.prod.*, 1), 2)'); }); - it('should call panelCtrl.refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); + it('should call panelCtrl.refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); }); }); - describe('when updating targets with nested query', function() { - beforeEach(function() { + describe('when updating targets with nested query', () => { + beforeEach(() => { ctx.ctrl.target.target = 'scaleToSeconds(#A, 60)'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); }); - it('should add function params', function() { - expect(ctx.ctrl.queryModel.segments.length).to.be(1); - expect(ctx.ctrl.queryModel.segments[0].value).to.be('#A'); + it('should add function params', () => { + expect(ctx.ctrl.queryModel.segments.length).toBe(1); + expect(ctx.ctrl.queryModel.segments[0].value).toBe('#A'); - expect(ctx.ctrl.queryModel.functions[0].params.length).to.be(1); - expect(ctx.ctrl.queryModel.functions[0].params[0]).to.be(60); + expect(ctx.ctrl.queryModel.functions[0].params.length).toBe(1); + expect(ctx.ctrl.queryModel.functions[0].params[0]).toBe(60); }); - it('target should remain the same', function() { - expect(ctx.ctrl.target.target).to.be('scaleToSeconds(#A, 60)'); + it('target should remain the same', () => { + expect(ctx.ctrl.target.target).toBe('scaleToSeconds(#A, 60)'); }); - it('targetFull should include nested queries', function() { + it('targetFull should include nested queries', () => { ctx.ctrl.panelCtrl.panel.targets = [ { 
target: 'nested.query.count', @@ -219,17 +238,17 @@ describe('GraphiteQueryCtrl', function() { ctx.ctrl.updateModelTarget(); - expect(ctx.ctrl.target.target).to.be('scaleToSeconds(#A, 60)'); + expect(ctx.ctrl.target.target).toBe('scaleToSeconds(#A, 60)'); - expect(ctx.ctrl.target.targetFull).to.be('scaleToSeconds(nested.query.count, 60)'); + expect(ctx.ctrl.target.targetFull).toBe('scaleToSeconds(nested.query.count, 60)'); }); }); - describe('when updating target used in other query', function() { - beforeEach(function() { + describe('when updating target used in other query', () => { + beforeEach(() => { ctx.ctrl.target.target = 'metrics.a.count'; ctx.ctrl.target.refId = 'A'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.panelCtrl.panel.targets = [ctx.ctrl.target, { target: 'sumSeries(#A)', refId: 'B' }]; @@ -237,113 +256,113 @@ describe('GraphiteQueryCtrl', function() { ctx.ctrl.updateModelTarget(); }); - it('targetFull of other query should update', function() { - expect(ctx.ctrl.panel.targets[1].targetFull).to.be('sumSeries(metrics.a.count)'); + it('targetFull of other query should update', () => { + expect(ctx.ctrl.panel.targets[1].targetFull).toBe('sumSeries(metrics.a.count)'); }); }); - describe('when adding seriesByTag function', function() { - beforeEach(function() { + describe('when adding seriesByTag function', () => { + beforeEach(() => { ctx.ctrl.target.target = ''; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.addFunction(gfunc.getFuncDef('seriesByTag')); }); - it('should update functions', function() { - expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).to.be(0); + it('should update functions', () => { 
+ expect(ctx.ctrl.queryModel.getSeriesByTagFuncIndex()).toBe(0); }); - it('should update seriesByTagUsed flag', function() { - expect(ctx.ctrl.queryModel.seriesByTagUsed).to.be(true); + it('should update seriesByTagUsed flag', () => { + expect(ctx.ctrl.queryModel.seriesByTagUsed).toBe(true); }); - it('should update target', function() { - expect(ctx.ctrl.target.target).to.be('seriesByTag()'); + it('should update target', () => { + expect(ctx.ctrl.target.target).toBe('seriesByTag()'); }); - it('should call refresh', function() { - expect(ctx.panelCtrl.refresh.called).to.be(true); + it('should call refresh', () => { + expect(ctx.panelCtrl.refresh).toHaveBeenCalled(); }); }); - describe('when parsing seriesByTag function', function() { - beforeEach(function() { + describe('when parsing seriesByTag function', () => { + beforeEach(() => { ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); }); - it('should add tags', function() { + it('should add tags', () => { const expected = [ { key: 'tag1', operator: '=', value: 'value1' }, { key: 'tag2', operator: '!=~', value: 'value2' }, ]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should add plus button', function() { - expect(ctx.ctrl.addTagSegments.length).to.be(1); + it('should add plus button', () => { + expect(ctx.ctrl.addTagSegments.length).toBe(1); }); }); - describe('when tag added', function() { - beforeEach(function() { + describe('when tag added', () => { + beforeEach(() => { ctx.ctrl.target.target = 'seriesByTag()'; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); 
ctx.ctrl.parseTarget(); ctx.ctrl.addNewTag({ value: 'tag1' }); }); - it('should update tags with default value', function() { + it('should update tags with default value', () => { const expected = [{ key: 'tag1', operator: '=', value: '' }]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should update target', function() { + it('should update target', () => { const expected = "seriesByTag('tag1=')"; - expect(ctx.ctrl.target.target).to.eql(expected); + expect(ctx.ctrl.target.target).toEqual(expected); }); }); - describe('when tag changed', function() { - beforeEach(function() { + describe('when tag changed', () => { + beforeEach(() => { ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.tagChanged({ key: 'tag1', operator: '=', value: 'new_value' }, 0); }); - it('should update tags', function() { + it('should update tags', () => { const expected = [ { key: 'tag1', operator: '=', value: 'new_value' }, { key: 'tag2', operator: '!=~', value: 'value2' }, ]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should update target', function() { + it('should update target', () => { const expected = "seriesByTag('tag1=new_value', 'tag2!=~value2')"; - expect(ctx.ctrl.target.target).to.eql(expected); + expect(ctx.ctrl.target.target).toEqual(expected); }); }); - describe('when tag removed', function() { - beforeEach(function() { + describe('when tag removed', () => { + beforeEach(() => { ctx.ctrl.target.target = "seriesByTag('tag1=value1', 'tag2!=~value2')"; - ctx.ctrl.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([{ expandable: false }])); + ctx.ctrl.datasource.metricFindQuery = () => 
Promise.resolve([{ expandable: false }]); ctx.ctrl.parseTarget(); ctx.ctrl.removeTag(0); }); - it('should update tags', function() { + it('should update tags', () => { const expected = [{ key: 'tag2', operator: '!=~', value: 'value2' }]; - expect(ctx.ctrl.queryModel.tags).to.eql(expected); + expect(ctx.ctrl.queryModel.tags).toEqual(expected); }); - it('should update target', function() { + it('should update target', () => { const expected = "seriesByTag('tag2!=~value2')"; - expect(ctx.ctrl.target.target).to.eql(expected); + expect(ctx.ctrl.target.target).toEqual(expected); }); }); }); From 1c691ac855142222dc4549a613d52a1171487e1d Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:51:34 +0200 Subject: [PATCH 186/263] changelog: add notes about closing #12533 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 826507e1bd65..0f3fb6b9d01c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,7 @@ * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) +* **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) # 5.2.1 (2018-06-29) From a63fca03b87193c87d6154628254998a06cf434d Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 15:57:07 +0200 Subject: [PATCH 187/263] changelog: add notes about closing #12551 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f3fb6b9d01c..6a7d2db1c14e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ * **Prometheus**: Fix graph panel bar width issue in 
aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) * **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) +* **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) * **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) From 5de8b6c2f01cdfa0505f93e6469a38702fdd66fa Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 16:45:36 +0200 Subject: [PATCH 188/263] changelog: add notes about closing #12489 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a7d2db1c14e..aa794b92164e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,7 @@ * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) +* **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) # 5.2.2 (unreleased) From 27c081349fb11f1ad8d304873aa9cc92a45a2027 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 17:03:58 +0200 Subject: [PATCH 189/263] Remove old influx stuff --- 
public/app/plugins/datasource/influxdb/query_ctrl.ts | 2 +- .../app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/public/app/plugins/datasource/influxdb/query_ctrl.ts b/public/app/plugins/datasource/influxdb/query_ctrl.ts index 174497111433..ce669c9f4589 100644 --- a/public/app/plugins/datasource/influxdb/query_ctrl.ts +++ b/public/app/plugins/datasource/influxdb/query_ctrl.ts @@ -338,7 +338,7 @@ export class InfluxQueryCtrl extends QueryCtrl { this.tagSegments.push(this.uiSegmentSrv.newPlusButton()); } } - console.log(this.tagSegments); + this.rebuildTargetTagConditions(); } diff --git a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts index 151dd7ab0c6d..4daa48d6b9d3 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_ctrl_specs.ts @@ -57,13 +57,11 @@ describe('InfluxDBQueryCtrl', function() { }); it('should update tag key', function() { - console.log(ctx.ctrl.target.tags); expect(ctx.ctrl.target.tags[0].key).to.be('asd'); expect(ctx.ctrl.tagSegments[0].type).to.be('key'); }); it('should add tagSegments', function() { - console.log(ctx.ctrl.tagSegments); expect(ctx.ctrl.tagSegments.length).to.be(3); }); }); From d8d748d2aa9987e93e6b8988b66d2d217be98ac0 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 17:40:00 +0200 Subject: [PATCH 190/263] remove unneeded comment --- .../app/plugins/datasource/prometheus/specs/completer.jest.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts index fbe2dce0ce50..b29e4d272337 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.jest.ts @@ -5,8 +5,6 @@ 
jest.mock('../datasource'); jest.mock('app/core/services/backend_srv'); describe('Prometheus editor completer', function() { - //beforeEach(ctx.providePhase(['templateSrv'])); - function getSessionStub(data) { return { getTokenAt: jest.fn(() => data.currentToken), From ce9b25a5ac66f0f6a8b9a2f1c91b14c184ed9143 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Tue, 24 Jul 2018 18:30:29 +0200 Subject: [PATCH 191/263] Remove comments --- .../graphite/specs/query_ctrl.jest.ts | 44 ------------------- 1 file changed, 44 deletions(-) diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts index 58cefeef6f6a..b38ad56427bd 100644 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl.jest.ts @@ -1,8 +1,5 @@ import { uiSegmentSrv } from 'app/core/services/segment_srv'; -// import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - import gfunc from '../gfunc'; -// import helpers from 'test/specs/helpers'; import { GraphiteQueryCtrl } from '../query_ctrl'; describe('GraphiteQueryCtrl', () => { @@ -24,47 +21,6 @@ describe('GraphiteQueryCtrl', () => { targets: [ctx.target], }; - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(function($compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - - //beforeEach(ctx.providePhase()); - // beforeEach( - // angularMocks.inject(($rootScope, $controller, $q) => { - // ctx.$q = $q; - // ctx.scope = $rootScope.$new(); - // ctx.target = { target: 'aliasByNode(scaleToSeconds(test.prod.*,1),2)' }; - // ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when([])); - // ctx.datasource.getFuncDefs = 
sinon.stub().returns(ctx.$q.when(gfunc.getFuncDefs('1.0'))); - // ctx.datasource.getFuncDef = gfunc.getFuncDef; - // ctx.datasource.waitForFuncDefsLoaded = sinon.stub().returns(ctx.$q.when(null)); - // ctx.datasource.createFuncInstance = gfunc.createFuncInstance; - // ctx.panelCtrl = { panel: {} }; - // ctx.panelCtrl = { - // panel: { - // targets: [ctx.target], - // }, - // }; - // ctx.panelCtrl.refresh = sinon.spy(); - - // ctx.ctrl = $controller( - // GraphiteQueryCtrl, - // { $scope: ctx.scope }, - // { - // panelCtrl: ctx.panelCtrl, - // datasource: ctx.datasource, - // target: ctx.target, - // } - // ); - // ctx.scope.$digest(); - // }) - // ); - beforeEach(() => { GraphiteQueryCtrl.prototype.target = ctx.target; GraphiteQueryCtrl.prototype.datasource = ctx.datasource; From 1dd9646a502c8f0749ed1752b25f39111677effb Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 19:05:09 +0200 Subject: [PATCH 192/263] fix failing test due to time diff issues --- pkg/services/sqlstore/dashboard_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/services/sqlstore/dashboard_test.go b/pkg/services/sqlstore/dashboard_test.go index 0ca1c5d67e49..8ff78c4a0ffa 100644 --- a/pkg/services/sqlstore/dashboard_test.go +++ b/pkg/services/sqlstore/dashboard_test.go @@ -181,7 +181,7 @@ func TestDashboardDataAccess(t *testing.T) { So(err, ShouldBeNil) So(query.Result.FolderId, ShouldEqual, 0) So(query.Result.CreatedBy, ShouldEqual, savedDash.CreatedBy) - So(query.Result.Created, ShouldEqual, savedDash.Created.Truncate(time.Second)) + So(query.Result.Created, ShouldHappenWithin, 3*time.Second, savedDash.Created) So(query.Result.UpdatedBy, ShouldEqual, 100) So(query.Result.Updated.IsZero(), ShouldBeFalse) }) From 582652145fa825cfce0a85b827d70f09b2cda45e Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 24 Jul 2018 19:21:23 +0200 Subject: [PATCH 193/263] minor fixes --- docs/sources/features/datasources/prometheus.md | 6 +++++- 
docs/sources/reference/templating.md | 3 +++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 190220fb0f17..0ed9e108df64 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -76,7 +76,11 @@ Name | Description For details of *metric names*, *label names* and *label values* are please refer to the [Prometheus documentation](http://prometheus.io/docs/concepts/data_model/#metric-names-and-labels). -It is possible to use some global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. +#### Using interval and range variables + +> Support for `$__range` and `$__range_ms` only available from Grafana v5.3 + +It's possible to use some global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. ### Using variables in queries diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index 08a142d36369..ce1a1299d26e 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -274,6 +274,9 @@ The `$__timeFilter` is used in the MySQL data source. This variable is only available in the Singlestat panel and can be used in the prefix or suffix fields on the Options tab. The variable will be replaced with the series name or alias. ### The $__range Variable + +> Only available in Grafana v5.3+ + Currently only supported for Prometheus data sources. This variable represents the range for the current dashboard. It is calculated by `to - from`. 
It has a millisecond representation called `$__range_ms`. ## Repeating Panels From 055d208a326f08cc4ad69324f9c4c1722b35e59e Mon Sep 17 00:00:00 2001 From: Mitsuhiro Tanda Date: Wed, 25 Jul 2018 11:27:43 +0900 Subject: [PATCH 194/263] fix invalid reference --- pkg/tsdb/cloudwatch/cloudwatch.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 4af73fc2ba9f..92352a513153 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -99,14 +99,15 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo continue } + RefId := queryContext.Queries[i].RefId query, err := parseQuery(queryContext.Queries[i].Model) if err != nil { - result.Results[query.RefId] = &tsdb.QueryResult{ + result.Results[RefId] = &tsdb.QueryResult{ Error: err, } return result, nil } - query.RefId = queryContext.Queries[i].RefId + query.RefId = RefId if query.Id != "" { if _, ok := getMetricDataQueries[query.Region]; !ok { From f4ab432542383c726d517f7a70000460d69ac4b3 Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Wed, 25 Jul 2018 10:29:55 +0200 Subject: [PATCH 195/263] added position absolute and some flexbox so I could remov changes in display and setTimeout, added tests and types, did some renaming --- public/app/containers/Teams/TeamList.tsx | 2 +- .../DeleteButton/DeleteButton.jest.tsx | 44 ++++++++++ .../components/DeleteButton/DeleteButton.tsx | 82 ++++++++----------- public/sass/components/_delete_button.scss | 37 +++++---- 4 files changed, 99 insertions(+), 66 deletions(-) create mode 100644 public/app/core/components/DeleteButton/DeleteButton.jest.tsx diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx index 87d24f8ddd4c..b86763d87998 100644 --- a/public/app/containers/Teams/TeamList.tsx +++ b/public/app/containers/Teams/TeamList.tsx @@ -55,7 +55,7 @@ export class TeamList extends 
React.Component { {team.memberCount} - this.deleteTeam(team)} /> + this.deleteTeam(team)} /> ); diff --git a/public/app/core/components/DeleteButton/DeleteButton.jest.tsx b/public/app/core/components/DeleteButton/DeleteButton.jest.tsx new file mode 100644 index 000000000000..12acadee18ad --- /dev/null +++ b/public/app/core/components/DeleteButton/DeleteButton.jest.tsx @@ -0,0 +1,44 @@ +import React from 'react'; +import DeleteButton from './DeleteButton'; +import { shallow } from 'enzyme'; + +describe('DeleteButton', () => { + let wrapper; + let deleted; + + beforeAll(() => { + deleted = false; + + function deleteItem() { + deleted = true; + } + wrapper = shallow( deleteItem()} />); + }); + + it('should show confirm delete when clicked', () => { + expect(wrapper.state().showConfirm).toBe(false); + wrapper.find('.delete-button').simulate('click'); + expect(wrapper.state().showConfirm).toBe(true); + }); + + it('should hide confirm delete when clicked', () => { + wrapper.find('.delete-button').simulate('click'); + expect(wrapper.state().showConfirm).toBe(true); + wrapper + .find('.confirm-delete') + .find('.btn') + .at(0) + .simulate('click'); + expect(wrapper.state().showConfirm).toBe(false); + }); + + it('should show confirm delete when clicked', () => { + expect(deleted).toBe(false); + wrapper + .find('.confirm-delete') + .find('.btn') + .at(1) + .simulate('click'); + expect(deleted).toBe(true); + }); +}); diff --git a/public/app/core/components/DeleteButton/DeleteButton.tsx b/public/app/core/components/DeleteButton/DeleteButton.tsx index 61a322b591eb..a83ce6097ad0 100644 --- a/public/app/core/components/DeleteButton/DeleteButton.tsx +++ b/public/app/core/components/DeleteButton/DeleteButton.tsx @@ -1,73 +1,61 @@ -import React, { Component } from 'react'; +import React, { PureComponent } from 'react'; -export default class DeleteButton extends Component { - state = { - deleteButton: 'delete-button--show', - confirmSpan: 'confirm-delete--removed', +export interface 
DeleteButtonProps { + onConfirmDelete(); +} + +export interface DeleteButtonStates { + showConfirm: boolean; +} + +export default class DeleteButton extends PureComponent { + state: DeleteButtonStates = { + showConfirm: false, }; - handleDelete = event => { + onClickDelete = event => { if (event) { event.preventDefault(); } this.setState({ - deleteButton: 'delete-button--hide', + showConfirm: true, }); - - setTimeout(() => { - this.setState({ - deleteButton: 'delete-button--removed', - }); - }, 100); - - setTimeout(() => { - this.setState({ - confirmSpan: 'confirm-delete--hide', - }); - }, 100); - - setTimeout(() => { - this.setState({ - confirmSpan: 'confirm-delete--show', - }); - }, 150); }; - cancelDelete = event => { - event.preventDefault(); - + onClickCancel = event => { + if (event) { + event.preventDefault(); + } this.setState({ - confirmSpan: 'confirm-delete--hide', + showConfirm: false, }); - - setTimeout(() => { - this.setState({ - confirmSpan: 'confirm-delete--removed', - deleteButton: 'delete-button--hide', - }); - }, 140); - - setTimeout(() => { - this.setState({ - deleteButton: 'delete-button--show', - }); - }, 190); }; render() { - const { confirmDelete } = this.props; + const onClickConfirm = this.props.onConfirmDelete; + let showConfirm; + let showDeleteButton; + + if (this.state.showConfirm) { + showConfirm = 'show'; + showDeleteButton = 'hide'; + } else { + showConfirm = 'hide'; + showDeleteButton = 'show'; + } + return ( - + - - + + Cancel - + Confirm Delete diff --git a/public/sass/components/_delete_button.scss b/public/sass/components/_delete_button.scss index 19f32189d81c..e56a1181a093 100644 --- a/public/sass/components/_delete_button.scss +++ b/public/sass/components/_delete_button.scss @@ -1,49 +1,50 @@ +// sets a fixed width so that the rest of the table +// isn't affected by the animation .delete-button-container { - max-width: 24px; width: 24px; direction: rtl; - max-height: 38px; - display: block; + display: flex; + align-items: 
center; } +//this container is used to make sure confirm-delete isn't +//shown outside of table .confirm-delete-container { overflow: hidden; width: 145px; - display: block; + position: absolute; + z-index: 1; } .delete-button { - &--show { - display: inline-block; + position: absolute; + + &.show { opacity: 1; transition: opacity 0.1s ease; + z-index: 2; } - &--hide { - display: inline-block; + &.hide { opacity: 0; transition: opacity 0.1s ease; - } - &--removed { - display: none; + z-index: 0; } } .confirm-delete { - &--show { - display: inline-block; + display: flex; + align-items: flex-start; + + &.show { opacity: 1; transition: opacity 0.08s ease-out, transform 0.1s ease-out; transform: translateX(0); } - &--hide { - display: inline-block; + &.hide { opacity: 0; transition: opacity 0.12s ease-in, transform 0.14s ease-in; transform: translateX(100px); } - &--removed { - display: none; - } } From df62282c115cea465577b5f1c02077b87166255e Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Wed, 25 Jul 2018 11:27:43 +0200 Subject: [PATCH 196/263] fix for typeahead background, increased lighten --- public/sass/_variables.light.scss | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/sass/_variables.light.scss b/public/sass/_variables.light.scss index b6e9e7db979b..b6248da6a002 100644 --- a/public/sass/_variables.light.scss +++ b/public/sass/_variables.light.scss @@ -218,7 +218,7 @@ $search-filter-box-bg: $gray-7; // Typeahead $typeahead-shadow: 0 5px 10px 0 $gray-5; -$typeahead-selected-bg: lighten($blue, 25%); +$typeahead-selected-bg: lighten($blue, 57%); $typeahead-selected-color: $blue; // Dropdowns From 5fbd8ada3c55cfe8eecc57d894b6a445b76e00c9 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 11:54:51 +0200 Subject: [PATCH 197/263] changelog: add notes about closing #12668 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aa794b92164e..27651b2216f4 100644 
--- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,6 +33,7 @@ * **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) * **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) +* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) # 5.2.1 (2018-06-29) From 45762d04e392be18658df8a0ecd081a03bb09b5f Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 11:55:34 +0200 Subject: [PATCH 198/263] changelog: update [skip ci] --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 27651b2216f4..0f813272e60f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,7 +23,7 @@ * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) -# 5.2.2 (unreleased) +# 5.2.2 (2018-07-25) ### Minor From 9c40028d58431fcab8c3d7dddb44b2593a0c7130 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 13:22:55 +0200 Subject: [PATCH 199/263] changelog: add notes about closing #12668 [skip ci] --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f813272e60f..990421d30d38 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -22,6 +22,7 @@ * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) * **Cloudwatch**: 
Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) +* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) # 5.2.2 (2018-07-25) @@ -33,7 +34,6 @@ * **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) * **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) -* **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) # 5.2.1 (2018-06-29) From 7e773e2d5e35045f87be875fa81ac2c930d1257f Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 14:14:25 +0200 Subject: [PATCH 200/263] changelog: add notes about closing #12533 [skip ci] --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 990421d30d38..6409f094f657 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -33,7 +33,7 @@ * **Postgres/MySQL/MSSQL**: Fix connection leak [#12636](https://github.com/grafana/grafana/issues/12636) [#9827](https://github.com/grafana/grafana/issues/9827) * **Plugins**: Fix loading of external plugins [#12551](https://github.com/grafana/grafana/issues/12551) * **Dashboard**: Remove unwanted scrollbars in embedded panels [#12589](https://github.com/grafana/grafana/issues/12589) -* **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533) +* **Prometheus**: Prevent error using $__interval_ms in query [#12533](https://github.com/grafana/grafana/pull/12533), thx [@mtanda](https://github.com/mtanda) # 5.2.1 (2018-06-29) From f3504612062f2bcf43a02c985942d5b70ca52439 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 25 Jul 2018 14:52:03 +0200 
Subject: [PATCH 201/263] Start conversion --- .../specs/variable_srv_init.jest.ts | 238 ++++++++++++++++++ 1 file changed, 238 insertions(+) create mode 100644 public/app/features/templating/specs/variable_srv_init.jest.ts diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts new file mode 100644 index 000000000000..218170ae4547 --- /dev/null +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -0,0 +1,238 @@ +//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; + +import '../all'; + +import _ from 'lodash'; +// import helpers from 'test/specs/helpers'; +// import { Emitter } from 'app/core/core'; +import { VariableSrv } from '../variable_srv'; +import $q from 'q'; + +describe('VariableSrv init', function() { + let templateSrv = { + init: () => {}, + }; + let $injector = { + instantiate: (vars, model) => { + return new vars(model.model); + }, + }; + let $rootscope = { + $on: () => {}, + }; + + let ctx = { + datasourceSrv: {}, + $location: {}, + dashboard: {}, + }; + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + + // beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location'])); + // beforeEach( + // angularMocks.inject(($rootScope, $q, $location, $injector) => { + // ctx.$q = $q; + // ctx.$rootScope = $rootScope; + // ctx.$location = $location; + // ctx.variableSrv = $injector.get('variableSrv'); + // ctx.$rootScope.$digest(); + // }) + // ); + + function describeInitScenario(desc, fn) { + describe(desc, function() { + // events: new Emitter(), + var scenario: any = { + urlParams: {}, + setup: setupFn => { + scenario.setupFn = 
setupFn; + }, + }; + + beforeEach(function() { + scenario.setupFn(); + ctx.variableSrv = new VariableSrv($rootscope, $q, {}, $injector, templateSrv); + ctx.variableSrv.datasource = {}; + ctx.variableSrv.datasource.metricFindQuery = jest.fn(() => Promise.resolve(scenario.queryResult)); + + ctx.variableSrv.datasourceSrv = { + get: () => Promise.resolve(ctx.datasource), + getMetricSources: () => Promise.resolve(scenario.metricSources), + }; + + ctx.variableSrv.$location.search = () => Promise.resolve(scenario.urlParams); + ctx.variableSrv.dashboard = { + templating: { list: scenario.variables }, + // events: new Emitter(), + }; + + ctx.variableSrv.init(ctx.variableSrv.dashboard); + // ctx.$rootScope.$digest(); + + scenario.variables = ctx.variableSrv.variables; + }); + + fn(scenario); + }); + } + + ['query', 'interval', 'custom', 'datasource'].forEach(type => { + describeInitScenario('when setting ' + type + ' variable via url', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + name: 'apps', + type: type, + current: { text: 'test', value: 'test' }, + options: [{ text: 'test', value: 'test' }], + }, + ]; + scenario.urlParams['var-apps'] = 'new'; + scenario.metricSources = []; + }); + + it('should update current value', () => { + expect(scenario.variables[0].current.value).toBe('new'); + expect(scenario.variables[0].current.text).toBe('new'); + }); + }); + }); + + describe('given dependent variables', () => { + var variableList = [ + { + name: 'app', + type: 'query', + query: '', + current: { text: 'app1', value: 'app1' }, + options: [{ text: 'app1', value: 'app1' }], + }, + { + name: 'server', + type: 'query', + refresh: 1, + query: '$app.*', + current: { text: 'server1', value: 'server1' }, + options: [{ text: 'server1', value: 'server1' }], + }, + ]; + + describeInitScenario('when setting parent var from url', scenario => { + scenario.setup(() => { + scenario.variables = _.cloneDeep(variableList); + scenario.urlParams['var-app'] = 'google'; + 
scenario.queryResult = [{ text: 'google-server1' }, { text: 'google-server2' }]; + }); + + it('should update child variable', () => { + expect(scenario.variables[1].options.length).toBe(2); + expect(scenario.variables[1].current.text).toBe('google-server1'); + }); + + it('should only update it once', () => { + expect(ctx.variableSrv.datasource.metricFindQuery).toHaveBeenCalledTimes(1); + }); + }); + }); + + describeInitScenario('when datasource variable is initialized', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + type: 'datasource', + query: 'graphite', + name: 'test', + current: { value: 'backend4_pee', text: 'backend4_pee' }, + regex: '/pee$/', + }, + ]; + scenario.metricSources = [ + { name: 'backend1', meta: { id: 'influx' } }, + { name: 'backend2_pee', meta: { id: 'graphite' } }, + { name: 'backend3', meta: { id: 'graphite' } }, + { name: 'backend4_pee', meta: { id: 'graphite' } }, + ]; + }); + + it('should update current value', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.options.length).toBe(2); + }); + }); + + describeInitScenario('when template variable is present in url multiple times', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + name: 'apps', + type: 'query', + multi: true, + current: { text: 'val1', value: 'val1' }, + options: [ + { text: 'val1', value: 'val1' }, + { text: 'val2', value: 'val2' }, + { text: 'val3', value: 'val3', selected: true }, + ], + }, + ]; + scenario.urlParams['var-apps'] = ['val2', 'val1']; + }); + + it('should update current value', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.current.value.length).toBe(2); + expect(variable.current.value[0]).toBe('val2'); + expect(variable.current.value[1]).toBe('val1'); + expect(variable.current.text).toBe('val2 + val1'); + expect(variable.options[0].selected).toBe(true); + expect(variable.options[1].selected).toBe(true); + }); + + it('should set options that are not in value 
to selected false', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.options[2].selected).toBe(false); + }); + }); + + describeInitScenario('when template variable is present in url multiple times using key/values', scenario => { + scenario.setup(() => { + scenario.variables = [ + { + name: 'apps', + type: 'query', + multi: true, + current: { text: 'Val1', value: 'val1' }, + options: [ + { text: 'Val1', value: 'val1' }, + { text: 'Val2', value: 'val2' }, + { text: 'Val3', value: 'val3', selected: true }, + ], + }, + ]; + scenario.urlParams['var-apps'] = ['val2', 'val1']; + }); + + it('should update current value', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.current.value.length).toBe(2); + expect(variable.current.value[0]).toBe('val2'); + expect(variable.current.value[1]).toBe('val1'); + expect(variable.current.text).toBe('Val2 + Val1'); + expect(variable.options[0].selected).toBe(true); + expect(variable.options[1].selected).toBe(true); + }); + + it('should set options that are not in value to selected false', function() { + var variable = ctx.variableSrv.variables[0]; + expect(variable.options[2].selected).toBe(false); + }); + }); +}); From 7d51c1524007fc47dc225e1256535c1386c07aca Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 25 Jul 2018 16:15:03 +0200 Subject: [PATCH 202/263] Two passing tests --- .../specs/variable_srv_init.jest.ts | 53 ++++++++++++++----- .../app/features/templating/variable_srv.ts | 1 + 2 files changed, 41 insertions(+), 13 deletions(-) diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts index 218170ae4547..519adc0a3503 100644 --- a/public/app/features/templating/specs/variable_srv_init.jest.ts +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -7,16 +7,18 @@ import _ from 'lodash'; // import { Emitter } from 'app/core/core'; import { VariableSrv } from 
'../variable_srv'; import $q from 'q'; +// import { model } from 'mobx-state-tree/dist/internal'; describe('VariableSrv init', function() { let templateSrv = { - init: () => {}, - }; - let $injector = { - instantiate: (vars, model) => { - return new vars(model.model); + init: vars => { + this.variables = vars; }, + variableInitialized: () => {}, + updateTemplateData: () => {}, + replace: str => str, }; + let $injector = {}; let $rootscope = { $on: () => {}, }; @@ -57,24 +59,35 @@ describe('VariableSrv init', function() { }, }; - beforeEach(function() { + beforeEach(async () => { scenario.setupFn(); + ctx = { + datasource: { + metricFindQuery: jest.fn(() => Promise.resolve(scenario.queryResult)), + }, + datasourceSrv: { + get: () => Promise.resolve(ctx.datasource), + getMetricSources: () => Promise.resolve(scenario.metricSources), + }, + templateSrv, + }; + ctx.variableSrv = new VariableSrv($rootscope, $q, {}, $injector, templateSrv); - ctx.variableSrv.datasource = {}; - ctx.variableSrv.datasource.metricFindQuery = jest.fn(() => Promise.resolve(scenario.queryResult)); - ctx.variableSrv.datasourceSrv = { - get: () => Promise.resolve(ctx.datasource), - getMetricSources: () => Promise.resolve(scenario.metricSources), + $injector.instantiate = (variable, model) => { + return getVarMockConstructor(variable, model, ctx); }; + ctx.variableSrv.datasource = ctx.datasource; + ctx.variableSrv.datasourceSrv = ctx.datasourceSrv; + ctx.variableSrv.$location.search = () => Promise.resolve(scenario.urlParams); ctx.variableSrv.dashboard = { templating: { list: scenario.variables }, - // events: new Emitter(), + // events: new Emitter(), }; - ctx.variableSrv.init(ctx.variableSrv.dashboard); + await ctx.variableSrv.init(ctx.variableSrv.dashboard); // ctx.$rootScope.$digest(); scenario.variables = ctx.variableSrv.variables; @@ -236,3 +249,17 @@ describe('VariableSrv init', function() { }); }); }); + +function getVarMockConstructor(variable, model, ctx) { + 
console.log(model.model.type); + switch (model.model.type) { + case 'datasource': + return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); + case 'query': + return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); + case 'interval': + return new variable(model.model, {}, ctx.templateSrv, ctx.variableSrv); + default: + return new variable(model.model); + } +} diff --git a/public/app/features/templating/variable_srv.ts b/public/app/features/templating/variable_srv.ts index 8ad3c2845e20..9f6522c9b86c 100644 --- a/public/app/features/templating/variable_srv.ts +++ b/public/app/features/templating/variable_srv.ts @@ -23,6 +23,7 @@ export class VariableSrv { // init variables for (let variable of this.variables) { + console.log(variable); variable.initLock = this.$q.defer(); } From 0f99e624b680b60e00ca05f408c5b85464d7cf81 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 25 Jul 2018 16:20:00 +0200 Subject: [PATCH 203/263] docs: using interval and range variables in prometheus Included example usages --- .../features/datasources/prometheus.md | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 0ed9e108df64..3a04ef92e31a 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -80,7 +80,26 @@ For details of *metric names*, *label names* and *label values* are please refer > Support for `$__range` and `$__range_ms` only available from Grafana v5.3 -It's possible to use some global template variables in Prometheus query template variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, where `$__range` is the dashboard's current time range and `$__range_ms` is the current range in milliseconds. 
+It's possible to use some global built-in variables in query variables; `$__interval`, `$__interval_ms`, `$__range` and `$__range_ms`, see [Global built-in variables](/reference/templating/#global-built-in-variables) for more information. These can be convenient to use in conjunction with the `query_result` function when you need to filter variable queries since +`label_values` function doesn't support queries. + +Make sure to set the variable's `refresh` trigger to be `On Time Range Change` to get the correct instances when changing the time range on the dashboard. + +**Example usage:** + +Populate a variable with the the busiest 5 request instances based on average QPS over the time range shown in the dashboard: + +``` +Query: query_result(topk(5, sum(rate(http_requests_total[$__range])) by (instance))) +Regex: /"([^"]+)"/ +``` + +Populate a variable with the instances having a certain state over the time range shown in the dashboard: + +``` +Query: query_result(max_over_time([$__range]) != ) +Regex: +``` ### Using variables in queries From 84e431d377b51405f37b4bae8321454218bcc7c4 Mon Sep 17 00:00:00 2001 From: David Kaltschmidt Date: Wed, 25 Jul 2018 16:16:33 +0200 Subject: [PATCH 204/263] Add tslib to TS compiler - using tslib reduces bundle sizes - add compiler option for easier default imports of CJS modules - remove double entry of fork-ts-checker-plugin - speed up hot reload by using exprimental ts-loader API --- package.json | 16 ++++---- scripts/webpack/webpack.hot.js | 10 ++++- tsconfig.json | 73 +++++++++++++++++++--------------- yarn.lock | 8 +++- 4 files changed, 65 insertions(+), 42 deletions(-) diff --git a/package.json b/package.json index c26438230cc8..c0581c1de43e 100644 --- a/package.json +++ b/package.json @@ -34,7 +34,7 @@ "expose-loader": "^0.7.3", "extract-text-webpack-plugin": "^4.0.0-beta.0", "file-loader": "^1.1.11", - "fork-ts-checker-webpack-plugin": "^0.4.1", + "fork-ts-checker-webpack-plugin": "^0.4.2", "gaze": "^1.1.2", "glob": 
"~7.0.0", "grunt": "1.0.1", @@ -71,12 +71,14 @@ "karma-webpack": "^3.0.0", "lint-staged": "^6.0.0", "load-grunt-tasks": "3.5.2", + "mini-css-extract-plugin": "^0.4.0", "mobx-react-devtools": "^4.2.15", "mocha": "^4.0.1", "ng-annotate-loader": "^0.6.1", "ng-annotate-webpack-plugin": "^0.2.1-pre", "ngtemplate-loader": "^2.0.1", "npm": "^5.4.2", + "optimize-css-assets-webpack-plugin": "^4.0.2", "phantomjs-prebuilt": "^2.1.15", "postcss-browser-reporter": "^0.5.0", "postcss-loader": "^2.0.6", @@ -90,15 +92,16 @@ "style-loader": "^0.21.0", "systemjs": "0.20.19", "systemjs-plugin-css": "^0.1.36", - "ts-loader": "^4.3.0", "ts-jest": "^22.4.6", + "ts-loader": "^4.3.0", + "tslib": "^1.9.3", "tslint": "^5.8.0", "tslint-loader": "^3.5.3", "typescript": "^2.6.2", + "uglifyjs-webpack-plugin": "^1.2.7", "webpack": "^4.8.0", "webpack-bundle-analyzer": "^2.9.0", "webpack-cleanup-plugin": "^0.5.1", - "fork-ts-checker-webpack-plugin": "^0.4.2", "webpack-cli": "^2.1.4", "webpack-dev-server": "^3.1.0", "webpack-merge": "^4.1.0", @@ -155,14 +158,12 @@ "immutable": "^3.8.2", "jquery": "^3.2.1", "lodash": "^4.17.10", - "mini-css-extract-plugin": "^0.4.0", "mobx": "^3.4.1", "mobx-react": "^4.3.5", "mobx-state-tree": "^1.3.1", "moment": "^2.22.2", "mousetrap": "^1.6.0", "mousetrap-global-bind": "^1.1.0", - "optimize-css-assets-webpack-plugin": "^4.0.2", "prismjs": "^1.6.0", "prop-types": "^15.6.0", "react": "^16.2.0", @@ -181,10 +182,9 @@ "slate-react": "^0.12.4", "tether": "^1.4.0", "tether-drop": "https://github.com/torkelo/drop/tarball/master", - "tinycolor2": "^1.4.1", - "uglifyjs-webpack-plugin": "^1.2.7" + "tinycolor2": "^1.4.1" }, "resolutions": { "caniuse-db": "1.0.30000772" } -} +} \ No newline at end of file diff --git a/scripts/webpack/webpack.hot.js b/scripts/webpack/webpack.hot.js index 28c8cec504d8..0305a6f465c7 100644 --- a/scripts/webpack/webpack.hot.js +++ b/scripts/webpack/webpack.hot.js @@ -20,6 +20,7 @@ module.exports = merge(common, { path: path.resolve(__dirname, 
'../../public/build'), filename: '[name].[hash].js', publicPath: "/public/build/", + pathinfo: false, }, resolve: { @@ -37,6 +38,12 @@ module.exports = merge(common, { } }, + optimization: { + removeAvailableModules: false, + removeEmptyChunks: false, + splitChunks: false, + }, + module: { rules: [ { @@ -56,7 +63,8 @@ module.exports = merge(common, { { loader: 'ts-loader', options: { - transpileOnly: true + transpileOnly: true, + experimentalWatchApi: true }, }], }, diff --git a/tsconfig.json b/tsconfig.json index 3596930a62ff..3ef1dd1b7695 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,32 +1,43 @@ { - "compilerOptions": { - "moduleResolution": "node", - "outDir": "public/dist", - "target": "es5", - "lib": ["es6", "dom"], - "rootDir": "public/", - "jsx": "react", - "module": "esnext", - "declaration": false, - "allowSyntheticDefaultImports": true, - "inlineSourceMap": false, - "sourceMap": true, - "noEmitOnError": false, - "emitDecoratorMetadata": false, - "experimentalDecorators": true, - "noImplicitReturns": true, - "noImplicitThis": false, - "noImplicitUseStrict":false, - "noImplicitAny": false, - "noUnusedLocals": true, - "baseUrl": "public", - "paths": { - "app": ["app"] - } - }, - "include": [ - "public/app/**/*.ts", - "public/app/**/*.tsx", - "public/test/**/*.ts" - ] -} + "compilerOptions": { + "moduleResolution": "node", + "outDir": "public/dist", + "target": "es5", + "lib": [ + "es6", + "dom" + ], + "rootDir": "public/", + "jsx": "react", + "module": "esnext", + "declaration": false, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "importHelpers": true, // importing helper functions from tslib + "noEmitHelpers": true, // disable emitting inline helper functions + "removeComments": false, // comments are needed by angular injections + "inlineSourceMap": false, + "sourceMap": true, + "noEmitOnError": false, + "emitDecoratorMetadata": false, + "experimentalDecorators": true, + 
"noImplicitReturns": true, + "noImplicitThis": false, + "noImplicitUseStrict": false, + "noImplicitAny": false, + "noUnusedLocals": true, + "baseUrl": "public", + "pretty": true, + "paths": { + "app": [ + "app" + ] + } + }, + "include": [ + "public/app/**/*.ts", + "public/app/**/*.tsx", + "public/test/**/*.ts" + ] +} \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index 6772d7c14a49..6e737e33348b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3101,7 +3101,7 @@ d3-request@1.0.6: d3-dsv "1" xmlhttprequest "1" -d3-scale-chromatic@^1.1.1: +d3-scale-chromatic@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-1.3.0.tgz#7ee38ffcaa7ad55cfed83a6a668aac5570c653c4" dependencies: @@ -7974,7 +7974,7 @@ mocha@^4.0.1: mkdirp "0.5.1" supports-color "4.4.0" -moment@^2.18.1: +moment@^2.22.2: version "2.22.2" resolved "https://registry.yarnpkg.com/moment/-/moment-2.22.2.tgz#3c257f9839fc0e93ff53149632239eb90783ff66" @@ -12029,6 +12029,10 @@ tslib@^1.8.0, tslib@^1.8.1, tslib@^1.9.0: version "1.9.2" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.2.tgz#8be0cc9a1f6dc7727c38deb16c2ebd1a2892988e" +tslib@^1.9.3: + version "1.9.3" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.9.3.tgz#d7e4dd79245d85428c4d7e4822a79917954ca286" + tslint-loader@^3.5.3: version "3.6.0" resolved "https://registry.yarnpkg.com/tslint-loader/-/tslint-loader-3.6.0.tgz#12ed4d5ef57d68be25cd12692fb2108b66469d76" From 931b944cddb879dfbfb44c5da18bfda43d36a0e9 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Wed, 25 Jul 2018 17:38:45 +0200 Subject: [PATCH 205/263] Almost all tests passing --- .../specs/variable_srv_init.jest.ts | 42 +++++-------------- .../app/features/templating/variable_srv.ts | 1 - 2 files changed, 10 insertions(+), 33 deletions(-) diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts index 519adc0a3503..eba0ba8cfee8 100644 --- 
a/public/app/features/templating/specs/variable_srv_init.jest.ts +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -1,13 +1,9 @@ -//import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - import '../all'; import _ from 'lodash'; -// import helpers from 'test/specs/helpers'; -// import { Emitter } from 'app/core/core'; import { VariableSrv } from '../variable_srv'; import $q from 'q'; -// import { model } from 'mobx-state-tree/dist/internal'; +// import { TemplateSrv } from '../template_srv'; describe('VariableSrv init', function() { let templateSrv = { @@ -16,8 +12,9 @@ describe('VariableSrv init', function() { }, variableInitialized: () => {}, updateTemplateData: () => {}, - replace: str => str, + replace: () => ' /pee$/', }; + // let templateSrv = new TemplateSrv(); let $injector = {}; let $rootscope = { $on: () => {}, @@ -29,29 +26,8 @@ describe('VariableSrv init', function() { dashboard: {}, }; - // beforeEach(angularMocks.module('grafana.core')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach( - // angularMocks.module(function($compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - - // beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location'])); - // beforeEach( - // angularMocks.inject(($rootScope, $q, $location, $injector) => { - // ctx.$q = $q; - // ctx.$rootScope = $rootScope; - // ctx.$location = $location; - // ctx.variableSrv = $injector.get('variableSrv'); - // ctx.$rootScope.$digest(); - // }) - // ); - function describeInitScenario(desc, fn) { describe(desc, function() { - // events: new Emitter(), var scenario: any = { urlParams: {}, setup: setupFn => { @@ -81,14 +57,12 @@ describe('VariableSrv init', function() { ctx.variableSrv.datasource = ctx.datasource; ctx.variableSrv.datasourceSrv = ctx.datasourceSrv; - ctx.variableSrv.$location.search = 
() => Promise.resolve(scenario.urlParams); + ctx.variableSrv.$location.search = () => scenario.urlParams; ctx.variableSrv.dashboard = { templating: { list: scenario.variables }, - // events: new Emitter(), }; await ctx.variableSrv.init(ctx.variableSrv.dashboard); - // ctx.$rootScope.$digest(); scenario.variables = ctx.variableSrv.variables; }); @@ -113,6 +87,7 @@ describe('VariableSrv init', function() { }); it('should update current value', () => { + console.log(type); expect(scenario.variables[0].current.value).toBe('new'); expect(scenario.variables[0].current.text).toBe('new'); }); @@ -176,6 +151,7 @@ describe('VariableSrv init', function() { }); it('should update current value', function() { + console.log(ctx.variableSrv.variables[0].options); var variable = ctx.variableSrv.variables[0]; expect(variable.options.length).toBe(2); }); @@ -251,14 +227,16 @@ describe('VariableSrv init', function() { }); function getVarMockConstructor(variable, model, ctx) { - console.log(model.model.type); + // console.log(model.model.type); switch (model.model.type) { case 'datasource': - return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); + return new variable(model.model, ctx.datasourceSrv, ctx.variableSrv, ctx.templateSrv); case 'query': return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv); case 'interval': return new variable(model.model, {}, ctx.templateSrv, ctx.variableSrv); + case 'custom': + return new variable(model.model, ctx.variableSrv); default: return new variable(model.model); } diff --git a/public/app/features/templating/variable_srv.ts b/public/app/features/templating/variable_srv.ts index 9f6522c9b86c..8ad3c2845e20 100644 --- a/public/app/features/templating/variable_srv.ts +++ b/public/app/features/templating/variable_srv.ts @@ -23,7 +23,6 @@ export class VariableSrv { // init variables for (let variable of this.variables) { - console.log(variable); variable.initLock = this.$q.defer(); } From 
35cc85bfcc46efdc79cf22b98741a6ea34b93d58 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 26 Jul 2018 09:36:46 +0200 Subject: [PATCH 206/263] All tests passing. Remove Karma test. --- .../specs/variable_srv_init.jest.ts | 31 ++- .../specs/variable_srv_init_specs.ts | 216 ------------------ 2 files changed, 13 insertions(+), 234 deletions(-) delete mode 100644 public/app/features/templating/specs/variable_srv_init_specs.ts diff --git a/public/app/features/templating/specs/variable_srv_init.jest.ts b/public/app/features/templating/specs/variable_srv_init.jest.ts index eba0ba8cfee8..ea8689f528b1 100644 --- a/public/app/features/templating/specs/variable_srv_init.jest.ts +++ b/public/app/features/templating/specs/variable_srv_init.jest.ts @@ -3,7 +3,6 @@ import '../all'; import _ from 'lodash'; import { VariableSrv } from '../variable_srv'; import $q from 'q'; -// import { TemplateSrv } from '../template_srv'; describe('VariableSrv init', function() { let templateSrv = { @@ -12,22 +11,21 @@ describe('VariableSrv init', function() { }, variableInitialized: () => {}, updateTemplateData: () => {}, - replace: () => ' /pee$/', + replace: str => + str.replace(this.regex, match => { + return match; + }), }; - // let templateSrv = new TemplateSrv(); + let $injector = {}; let $rootscope = { $on: () => {}, }; - let ctx = { - datasourceSrv: {}, - $location: {}, - dashboard: {}, - }; + let ctx = {}; function describeInitScenario(desc, fn) { - describe(desc, function() { + describe(desc, () => { var scenario: any = { urlParams: {}, setup: setupFn => { @@ -43,7 +41,7 @@ describe('VariableSrv init', function() { }, datasourceSrv: { get: () => Promise.resolve(ctx.datasource), - getMetricSources: () => Promise.resolve(scenario.metricSources), + getMetricSources: () => scenario.metricSources, }, templateSrv, }; @@ -87,7 +85,6 @@ describe('VariableSrv init', function() { }); it('should update current value', () => { - console.log(type); 
expect(scenario.variables[0].current.value).toBe('new'); expect(scenario.variables[0].current.text).toBe('new'); }); @@ -150,8 +147,7 @@ describe('VariableSrv init', function() { ]; }); - it('should update current value', function() { - console.log(ctx.variableSrv.variables[0].options); + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.options.length).toBe(2); }); @@ -175,7 +171,7 @@ describe('VariableSrv init', function() { scenario.urlParams['var-apps'] = ['val2', 'val1']; }); - it('should update current value', function() { + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.current.value.length).toBe(2); expect(variable.current.value[0]).toBe('val2'); @@ -185,7 +181,7 @@ describe('VariableSrv init', function() { expect(variable.options[1].selected).toBe(true); }); - it('should set options that are not in value to selected false', function() { + it('should set options that are not in value to selected false', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.options[2].selected).toBe(false); }); @@ -209,7 +205,7 @@ describe('VariableSrv init', function() { scenario.urlParams['var-apps'] = ['val2', 'val1']; }); - it('should update current value', function() { + it('should update current value', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.current.value.length).toBe(2); expect(variable.current.value[0]).toBe('val2'); @@ -219,7 +215,7 @@ describe('VariableSrv init', function() { expect(variable.options[1].selected).toBe(true); }); - it('should set options that are not in value to selected false', function() { + it('should set options that are not in value to selected false', () => { var variable = ctx.variableSrv.variables[0]; expect(variable.options[2].selected).toBe(false); }); @@ -227,7 +223,6 @@ describe('VariableSrv init', function() { }); function getVarMockConstructor(variable, model, ctx) { - // 
console.log(model.model.type); switch (model.model.type) { case 'datasource': return new variable(model.model, ctx.datasourceSrv, ctx.variableSrv, ctx.templateSrv); diff --git a/public/app/features/templating/specs/variable_srv_init_specs.ts b/public/app/features/templating/specs/variable_srv_init_specs.ts deleted file mode 100644 index 11639c6aa8f4..000000000000 --- a/public/app/features/templating/specs/variable_srv_init_specs.ts +++ /dev/null @@ -1,216 +0,0 @@ -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - -import '../all'; - -import _ from 'lodash'; -import helpers from 'test/specs/helpers'; -import { Emitter } from 'app/core/core'; - -describe('VariableSrv init', function() { - var ctx = new helpers.ControllerTestContext(); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location'])); - beforeEach( - angularMocks.inject(($rootScope, $q, $location, $injector) => { - ctx.$q = $q; - ctx.$rootScope = $rootScope; - ctx.$location = $location; - ctx.variableSrv = $injector.get('variableSrv'); - ctx.$rootScope.$digest(); - }) - ); - - function describeInitScenario(desc, fn) { - describe(desc, function() { - var scenario: any = { - urlParams: {}, - setup: setupFn => { - scenario.setupFn = setupFn; - }, - }; - - beforeEach(function() { - scenario.setupFn(); - ctx.datasource = {}; - ctx.datasource.metricFindQuery = sinon.stub().returns(ctx.$q.when(scenario.queryResult)); - - ctx.datasourceSrv.get = sinon.stub().returns(ctx.$q.when(ctx.datasource)); - ctx.datasourceSrv.getMetricSources = sinon.stub().returns(scenario.metricSources); - - ctx.$location.search = sinon.stub().returns(scenario.urlParams); - 
ctx.dashboard = { - templating: { list: scenario.variables }, - events: new Emitter(), - }; - - ctx.variableSrv.init(ctx.dashboard); - ctx.$rootScope.$digest(); - - scenario.variables = ctx.variableSrv.variables; - }); - - fn(scenario); - }); - } - - ['query', 'interval', 'custom', 'datasource'].forEach(type => { - describeInitScenario('when setting ' + type + ' variable via url', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - name: 'apps', - type: type, - current: { text: 'test', value: 'test' }, - options: [{ text: 'test', value: 'test' }], - }, - ]; - scenario.urlParams['var-apps'] = 'new'; - scenario.metricSources = []; - }); - - it('should update current value', () => { - expect(scenario.variables[0].current.value).to.be('new'); - expect(scenario.variables[0].current.text).to.be('new'); - }); - }); - }); - - describe('given dependent variables', () => { - var variableList = [ - { - name: 'app', - type: 'query', - query: '', - current: { text: 'app1', value: 'app1' }, - options: [{ text: 'app1', value: 'app1' }], - }, - { - name: 'server', - type: 'query', - refresh: 1, - query: '$app.*', - current: { text: 'server1', value: 'server1' }, - options: [{ text: 'server1', value: 'server1' }], - }, - ]; - - describeInitScenario('when setting parent var from url', scenario => { - scenario.setup(() => { - scenario.variables = _.cloneDeep(variableList); - scenario.urlParams['var-app'] = 'google'; - scenario.queryResult = [{ text: 'google-server1' }, { text: 'google-server2' }]; - }); - - it('should update child variable', () => { - expect(scenario.variables[1].options.length).to.be(2); - expect(scenario.variables[1].current.text).to.be('google-server1'); - }); - - it('should only update it once', () => { - expect(ctx.datasource.metricFindQuery.callCount).to.be(1); - }); - }); - }); - - describeInitScenario('when datasource variable is initialized', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - type: 'datasource', - query: 
'graphite', - name: 'test', - current: { value: 'backend4_pee', text: 'backend4_pee' }, - regex: '/pee$/', - }, - ]; - scenario.metricSources = [ - { name: 'backend1', meta: { id: 'influx' } }, - { name: 'backend2_pee', meta: { id: 'graphite' } }, - { name: 'backend3', meta: { id: 'graphite' } }, - { name: 'backend4_pee', meta: { id: 'graphite' } }, - ]; - }); - - it('should update current value', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.options.length).to.be(2); - }); - }); - - describeInitScenario('when template variable is present in url multiple times', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - name: 'apps', - type: 'query', - multi: true, - current: { text: 'val1', value: 'val1' }, - options: [ - { text: 'val1', value: 'val1' }, - { text: 'val2', value: 'val2' }, - { text: 'val3', value: 'val3', selected: true }, - ], - }, - ]; - scenario.urlParams['var-apps'] = ['val2', 'val1']; - }); - - it('should update current value', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.current.value.length).to.be(2); - expect(variable.current.value[0]).to.be('val2'); - expect(variable.current.value[1]).to.be('val1'); - expect(variable.current.text).to.be('val2 + val1'); - expect(variable.options[0].selected).to.be(true); - expect(variable.options[1].selected).to.be(true); - }); - - it('should set options that are not in value to selected false', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.options[2].selected).to.be(false); - }); - }); - - describeInitScenario('when template variable is present in url multiple times using key/values', scenario => { - scenario.setup(() => { - scenario.variables = [ - { - name: 'apps', - type: 'query', - multi: true, - current: { text: 'Val1', value: 'val1' }, - options: [ - { text: 'Val1', value: 'val1' }, - { text: 'Val2', value: 'val2' }, - { text: 'Val3', value: 'val3', selected: true }, - ], - }, - ]; - 
scenario.urlParams['var-apps'] = ['val2', 'val1']; - }); - - it('should update current value', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.current.value.length).to.be(2); - expect(variable.current.value[0]).to.be('val2'); - expect(variable.current.value[1]).to.be('val1'); - expect(variable.current.text).to.be('Val2 + Val1'); - expect(variable.options[0].selected).to.be(true); - expect(variable.options[1].selected).to.be(true); - }); - - it('should set options that are not in value to selected false', function() { - var variable = ctx.variableSrv.variables[0]; - expect(variable.options[2].selected).to.be(false); - }); - }); -}); From 88e91b3f51fa2c5a66442bfa3322abbfbeebd950 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 26 Jul 2018 10:44:40 +0200 Subject: [PATCH 207/263] Begin conversion --- .../panel/singlestat/specs/singlestat.jest.ts | 384 ++++++++++++++++++ 1 file changed, 384 insertions(+) create mode 100644 public/app/plugins/panel/singlestat/specs/singlestat.jest.ts diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts new file mode 100644 index 000000000000..2c945aa6eb23 --- /dev/null +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -0,0 +1,384 @@ +// import { describe, beforeEach, afterEach, it, sinon, expect, angularMocks } from 'test/lib/common'; + +// import helpers from 'test/specs/helpers'; +import { SingleStatCtrl } from '../module'; +import moment from 'moment'; + +describe('SingleStatCtrl', function() { + let ctx = {}; + let epoch = 1505826363746; + let clock; + + let $scope = { + $on: () => {}, + }; + + let $injector = { + get: () => {}, + }; + + SingleStatCtrl.prototype.panel = { + events: { + on: () => {}, + emit: () => {}, + }, + }; + SingleStatCtrl.prototype.dashboard = { + isTimezoneUtc: () => {}, + }; + + function singleStatScenario(desc, func) { + describe(desc, function() { + ctx.setup = 
function(setupFunc) { + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach(angularMocks.module('grafana.controllers')); + // beforeEach( + // angularMocks.module(function($compileProvider) { + // $compileProvider.preAssignBindingsEnabled(true); + // }) + // ); + + // beforeEach(ctx.providePhase()); + // beforeEach(ctx.createPanelController(SingleStatCtrl)); + + beforeEach(function() { + ctx.ctrl = new SingleStatCtrl($scope, $injector, {}); + setupFunc(); + ctx.ctrl.onDataReceived(ctx.data); + ctx.data = ctx.ctrl.data; + }); + }; + + func(ctx); + }); + } + + singleStatScenario('with defaults', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 1], [20, 2]] }]; + }); + + it('Should use series avg as default main value', function() { + expect(ctx.data.value).toBe(15); + expect(ctx.data.valueRounded).toBe(15); + }); + + it('should set formatted falue', function() { + expect(ctx.data.valueFormatted).toBe('15'); + }); + }); + + singleStatScenario('showing serie name instead of value', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 1], [20, 2]] }]; + ctx.ctrl.panel.valueName = 'name'; + }); + + it('Should use series avg as default main value', function() { + expect(ctx.data.value).toBe(0); + expect(ctx.data.valueRounded).toBe(0); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe('test.cpu1'); + }); + }); + + singleStatScenario('showing last iso time instead of value', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeAsIso'; + }); + + it('Should use time instead of value', function() { + console.log(ctx.data.value); + expect(ctx.data.value).toBe(1505634997920); + expect(ctx.data.valueRounded).toBe(1505634997920); + }); + + it('should set formatted value', 
function() { + expect(ctx.data.valueFormatted).toBe(moment(1505634997920).format('YYYY-MM-DD HH:mm:ss')); + }); + }); + + singleStatScenario('showing last iso time instead of value (in UTC)', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeAsIso'; + // ctx.setIsUtc(true); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe(moment.utc(1505634997920).format('YYYY-MM-DD HH:mm:ss')); + }); + }); + + singleStatScenario('showing last us time instead of value', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeAsUS'; + }); + + it('Should use time instead of value', function() { + expect(ctx.data.value).toBe(1505634997920); + expect(ctx.data.valueRounded).toBe(1505634997920); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe(moment(1505634997920).format('MM/DD/YYYY h:mm:ss a')); + }); + }); + + singleStatScenario('showing last us time instead of value (in UTC)', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeAsUS'; + // ctx.setIsUtc(true); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe(moment.utc(1505634997920).format('MM/DD/YYYY h:mm:ss a')); + }); + }); + + singleStatScenario('showing last time from now instead of value', function(ctx) { + beforeEach(() => { + // clock = sinon.useFakeTimers(epoch); + jest.useFakeTimers(); + }); + + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + 
ctx.ctrl.panel.format = 'dateTimeFromNow'; + }); + + it('Should use time instead of value', function() { + expect(ctx.data.value).toBe(1505634997920); + expect(ctx.data.valueRounded).toBe(1505634997920); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe('2 days ago'); + }); + + afterEach(() => { + jest.clearAllTimers(); + }); + }); + + singleStatScenario('showing last time from now instead of value (in UTC)', function(ctx) { + beforeEach(() => { + // clock = sinon.useFakeTimers(epoch); + jest.useFakeTimers(); + }); + + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeFromNow'; + // ctx.setIsUtc(true); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe('2 days ago'); + }); + + afterEach(() => { + jest.clearAllTimers(); + }); + }); + + singleStatScenario('MainValue should use same number for decimals as displayed when checking thresholds', function( + ctx + ) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[99.999, 1], [99.99999, 2]] }]; + }); + + it('Should be rounded', function() { + expect(ctx.data.value).toBe(99.999495); + expect(ctx.data.valueRounded).toBe(100); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe('100'); + }); + }); + + singleStatScenario('When value to text mapping is specified', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[9.9, 1]] }]; + ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }]; + }); + + it('value should remain', function() { + expect(ctx.data.value).toBe(9.9); + }); + + it('round should be rounded up', function() { + expect(ctx.data.valueRounded).toBe(10); + }); + + it('Should replace value with text', function() { + expect(ctx.data.valueFormatted).toBe('OK'); + }); + }); + + 
singleStatScenario('When range to text mapping is specified for first range', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[41, 50]] }]; + ctx.ctrl.panel.mappingType = 2; + ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; + }); + + it('Should replace value with text OK', function() { + expect(ctx.data.valueFormatted).toBe('OK'); + }); + }); + + singleStatScenario('When range to text mapping is specified for other ranges', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[65, 75]] }]; + ctx.ctrl.panel.mappingType = 2; + ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; + }); + + it('Should replace value with text NOT OK', function() { + expect(ctx.data.valueFormatted).toBe('NOT OK'); + }); + }); + + describe('When table data', function() { + const tableData = [ + { + columns: [{ text: 'Time', type: 'time' }, { text: 'test1' }, { text: 'mean' }, { text: 'test2' }], + rows: [[1492759673649, 'ignore1', 15, 'ignore2']], + type: 'table', + }, + ]; + + singleStatScenario('with default values', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.ctrl.panel.tableColumn = 'mean'; + }); + + it('Should use first rows value as default main value', function() { + expect(ctx.data.value).toBe(15); + expect(ctx.data.valueRounded).toBe(15); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).toBe('15'); + }); + }); + + singleStatScenario('When table data has multiple columns', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.ctrl.panel.tableColumn = ''; + }); + + it('Should set column to first column that is not time', function() { + expect(ctx.ctrl.panel.tableColumn).toBe('test1'); + }); + }); + + singleStatScenario('MainValue should use same number for decimals as displayed when checking 
thresholds', function( + ctx + ) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 99.99999, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'mean'; + }); + + it('Should be rounded', function() { + expect(ctx.data.value).toBe(99.99999); + expect(ctx.data.valueRounded).toBe(100); + }); + + it('should set formatted falue', function() { + expect(ctx.data.valueFormatted).toBe('100'); + }); + }); + + singleStatScenario('When value to text mapping is specified', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 9.9, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'mean'; + ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }]; + }); + + it('value should remain', function() { + expect(ctx.data.value).toBe(9.9); + }); + + it('round should be rounded up', function() { + expect(ctx.data.valueRounded).toBe(10); + }); + + it('Should replace value with text', function() { + expect(ctx.data.valueFormatted).toBe('OK'); + }); + }); + + singleStatScenario('When range to text mapping is specified for first range', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 41, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'mean'; + ctx.ctrl.panel.mappingType = 2; + ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; + }); + + it('Should replace value with text OK', function() { + expect(ctx.data.valueFormatted).toBe('OK'); + }); + }); + + singleStatScenario('When range to text mapping is specified for other ranges', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 65, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'mean'; + ctx.ctrl.panel.mappingType = 2; + ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; + }); + + it('Should replace value 
with text NOT OK', function() { + expect(ctx.data.valueFormatted).toBe('NOT OK'); + }); + }); + + singleStatScenario('When value is string', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 65, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'test1'; + }); + + it('Should replace value with text NOT OK', function() { + expect(ctx.data.valueFormatted).toBe('ignore1'); + }); + }); + + singleStatScenario('When value is zero', function(ctx) { + ctx.setup(function() { + ctx.data = tableData; + ctx.data[0].rows[0] = [1492759673649, 'ignore1', 0, 'ignore2']; + ctx.ctrl.panel.tableColumn = 'mean'; + }); + + it('Should return zero', function() { + expect(ctx.data.value).toBe(0); + }); + }); + }); +}); From 7699451d9438546e6655975d53deb7bf6314562d Mon Sep 17 00:00:00 2001 From: David Date: Thu, 26 Jul 2018 14:04:12 +0200 Subject: [PATCH 208/263] Refactor Explore query field (#12643) * Refactor Explore query field - extract typeahead field that only contains logic for the typeahead mechanics - renamed QueryField to PromQueryField, a wrapper around TypeaheadField that deals with Prometheus-specific concepts - PromQueryField creates a promql typeahead by providing the handlers for producing suggestions, and for applying suggestions - The `refresher` promise is needed to trigger a render once an async action in the wrapper returns. This is prep work for a composable query field to be used by Explore, as well as editors in datasource plugins. 
* Added typeahead handling tests - extracted context-to-suggestion logic to make it testable - kept DOM-dependent parts in main onTypeahead funtion * simplified error handling in explore query field * Refactor query suggestions - use monaco's suggestion types (roughly), see https://github.com/Microsoft/monaco-editor/blob/f6fb545/monaco.d.ts#L4208 - suggest functions and metrics in empty field (ctrl+space) - copy and expand prometheus function docs from prometheus datasource (will be migrated back to the datasource in the future) * Added prop and state types, removed unused cwrp * Split up suggestion processing for code readability --- .../Explore/PromQueryField.jest.tsx | 125 ++++ .../app/containers/Explore/PromQueryField.tsx | 340 +++++++++++ public/app/containers/Explore/QueryField.tsx | 545 ++++++++---------- public/app/containers/Explore/QueryRows.tsx | 6 +- public/app/containers/Explore/Typeahead.tsx | 61 +- .../Explore/slate-plugins/prism/promql.ts | 417 ++++++++++++-- public/sass/components/_slate_editor.scss | 1 + 7 files changed, 1096 insertions(+), 399 deletions(-) create mode 100644 public/app/containers/Explore/PromQueryField.jest.tsx create mode 100644 public/app/containers/Explore/PromQueryField.tsx diff --git a/public/app/containers/Explore/PromQueryField.jest.tsx b/public/app/containers/Explore/PromQueryField.jest.tsx new file mode 100644 index 000000000000..8d2903cb2c22 --- /dev/null +++ b/public/app/containers/Explore/PromQueryField.jest.tsx @@ -0,0 +1,125 @@ +import React from 'react'; +import Enzyme, { shallow } from 'enzyme'; +import Adapter from 'enzyme-adapter-react-16'; + +Enzyme.configure({ adapter: new Adapter() }); + +import PromQueryField from './PromQueryField'; + +describe('PromQueryField typeahead handling', () => { + const defaultProps = { + request: () => ({ data: { data: [] } }), + }; + + it('returns default suggestions on emtpty context', () => { + const instance = shallow().instance() as PromQueryField; + const result = 
instance.getTypeahead({ text: '', prefix: '', wrapperClasses: [] }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions.length).toEqual(2); + }); + + describe('range suggestions', () => { + it('returns range suggestions in range context', () => { + const instance = shallow().instance() as PromQueryField; + const result = instance.getTypeahead({ text: '1', prefix: '1', wrapperClasses: ['context-range'] }); + expect(result.context).toBe('context-range'); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions).toEqual([ + { + items: [{ label: '1m' }, { label: '5m' }, { label: '10m' }, { label: '30m' }, { label: '1h' }], + label: 'Range vector', + }, + ]); + }); + }); + + describe('metric suggestions', () => { + it('returns metrics suggestions by default', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ text: 'a', prefix: 'a', wrapperClasses: [] }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions.length).toEqual(2); + }); + + it('returns default suggestions after a binary operator', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ text: '*', prefix: '', wrapperClasses: [] }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeUndefined(); + expect(result.suggestions.length).toEqual(2); + }); + }); + + describe('label suggestions', () => { + it('returns default label suggestions on label context and no metric', () => { + const instance = shallow().instance() as PromQueryField; + const result = instance.getTypeahead({ text: 'j', prefix: 'j', wrapperClasses: ['context-labels'] }); + expect(result.context).toBe('context-labels'); + expect(result.suggestions).toEqual([{ items: [{ label: 'job' }, { label: 'instance' }], label: 'Labels' }]); + }); + + it('returns label suggestions on 
label context and metric', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: 'job', + prefix: 'job', + wrapperClasses: ['context-labels'], + metric: 'foo', + }); + expect(result.context).toBe('context-labels'); + expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]); + }); + + it('returns a refresher on label context and unavailable metric', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: 'job', + prefix: 'job', + wrapperClasses: ['context-labels'], + metric: 'xxx', + }); + expect(result.context).toBeUndefined(); + expect(result.refresher).toBeInstanceOf(Promise); + expect(result.suggestions).toEqual([]); + }); + + it('returns label values on label context when given a metric and a label key', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: '=ba', + prefix: 'ba', + wrapperClasses: ['context-labels'], + metric: 'foo', + labelKey: 'bar', + }); + expect(result.context).toBe('context-label-values'); + expect(result.suggestions).toEqual([{ items: [{ label: 'baz' }], label: 'Label values' }]); + }); + + it('returns label suggestions on aggregation context and metric', () => { + const instance = shallow( + + ).instance() as PromQueryField; + const result = instance.getTypeahead({ + text: 'job', + prefix: 'job', + wrapperClasses: ['context-aggregation'], + metric: 'foo', + }); + expect(result.context).toBe('context-aggregation'); + expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]); + }); + }); +}); diff --git a/public/app/containers/Explore/PromQueryField.tsx b/public/app/containers/Explore/PromQueryField.tsx new file mode 100644 index 000000000000..eb8fc25c67f6 --- /dev/null +++ b/public/app/containers/Explore/PromQueryField.tsx @@ -0,0 +1,340 @@ +import _ from 'lodash'; +import React from 
'react'; + +// dom also includes Element polyfills +import { getNextCharacter, getPreviousCousin } from './utils/dom'; +import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index'; +import PrismPromql, { FUNCTIONS } from './slate-plugins/prism/promql'; +import RunnerPlugin from './slate-plugins/runner'; +import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus'; + +import TypeaheadField, { + Suggestion, + SuggestionGroup, + TypeaheadInput, + TypeaheadFieldState, + TypeaheadOutput, +} from './QueryField'; + +const EMPTY_METRIC = ''; +const METRIC_MARK = 'metric'; +const PRISM_LANGUAGE = 'promql'; + +export const wrapLabel = label => ({ label }); +export const setFunctionMove = (suggestion: Suggestion): Suggestion => { + suggestion.move = -1; + return suggestion; +}; + +export function willApplySuggestion( + suggestion: string, + { typeaheadContext, typeaheadText }: TypeaheadFieldState +): string { + // Modify suggestion based on context + switch (typeaheadContext) { + case 'context-labels': { + const nextChar = getNextCharacter(); + if (!nextChar || nextChar === '}' || nextChar === ',') { + suggestion += '='; + } + break; + } + + case 'context-label-values': { + // Always add quotes and remove existing ones instead + if (!(typeaheadText.startsWith('="') || typeaheadText.startsWith('"'))) { + suggestion = `"${suggestion}`; + } + if (getNextCharacter() !== '"') { + suggestion = `${suggestion}"`; + } + break; + } + + default: + } + return suggestion; +} + +interface PromQueryFieldProps { + initialQuery?: string | null; + labelKeys?: { [index: string]: string[] }; // metric -> [labelKey,...] + labelValues?: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...] 
+ metrics?: string[]; + onPressEnter?: () => void; + onQueryChange?: (value: string) => void; + portalPrefix?: string; + request?: (url: string) => any; +} + +interface PromQueryFieldState { + labelKeys: { [index: string]: string[] }; // metric -> [labelKey,...] + labelValues: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...] + metrics: string[]; +} + +interface PromTypeaheadInput { + text: string; + prefix: string; + wrapperClasses: string[]; + metric?: string; + labelKey?: string; +} + +class PromQueryField extends React.Component { + plugins: any[]; + + constructor(props, context) { + super(props, context); + + this.plugins = [ + RunnerPlugin({ handler: props.onPressEnter }), + PluginPrism({ definition: PrismPromql, language: PRISM_LANGUAGE }), + ]; + + this.state = { + labelKeys: props.labelKeys || {}, + labelValues: props.labelValues || {}, + metrics: props.metrics || [], + }; + } + + componentDidMount() { + this.fetchMetricNames(); + } + + onChangeQuery = value => { + // Send text change to parent + const { onQueryChange } = this.props; + if (onQueryChange) { + onQueryChange(value); + } + }; + + onReceiveMetrics = () => { + if (!this.state.metrics) { + return; + } + setPrismTokens(PRISM_LANGUAGE, METRIC_MARK, this.state.metrics); + }; + + onTypeahead = (typeahead: TypeaheadInput): TypeaheadOutput => { + const { editorNode, prefix, text, wrapperNode } = typeahead; + + // Get DOM-dependent context + const wrapperClasses = Array.from(wrapperNode.classList); + // Take first metric as lucky guess + const metricNode = editorNode.querySelector(`.${METRIC_MARK}`); + const metric = metricNode && metricNode.textContent; + const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); + const labelKey = labelKeyNode && labelKeyNode.textContent; + + const result = this.getTypeahead({ text, prefix, wrapperClasses, metric, labelKey }); + + console.log('handleTypeahead', wrapperClasses, text, prefix, result.context); + + 
return result; + }; + + // Keep this DOM-free for testing + getTypeahead({ prefix, wrapperClasses, metric, text }: PromTypeaheadInput): TypeaheadOutput { + // Determine candidates by CSS context + if (_.includes(wrapperClasses, 'context-range')) { + // Suggestions for metric[|] + return this.getRangeTypeahead(); + } else if (_.includes(wrapperClasses, 'context-labels')) { + // Suggestions for metric{|} and metric{foo=|}, as well as metric-independent label queries like {|} + return this.getLabelTypeahead.apply(this, arguments); + } else if (metric && _.includes(wrapperClasses, 'context-aggregation')) { + return this.getAggregationTypeahead.apply(this, arguments); + } else if ( + // Non-empty but not inside known token unless it's a metric + (prefix && !_.includes(wrapperClasses, 'token')) || + prefix === metric || + (prefix === '' && !text.match(/^[)\s]+$/)) || // Empty context or after ')' + text.match(/[+\-*/^%]/) // After binary operator + ) { + return this.getEmptyTypeahead(); + } + + return { + suggestions: [], + }; + } + + getEmptyTypeahead(): TypeaheadOutput { + const suggestions: SuggestionGroup[] = []; + suggestions.push({ + prefixMatch: true, + label: 'Functions', + items: FUNCTIONS.map(setFunctionMove), + }); + + if (this.state.metrics) { + suggestions.push({ + label: 'Metrics', + items: this.state.metrics.map(wrapLabel), + }); + } + return { suggestions }; + } + + getRangeTypeahead(): TypeaheadOutput { + return { + context: 'context-range', + suggestions: [ + { + label: 'Range vector', + items: [...RATE_RANGES].map(wrapLabel), + }, + ], + }; + } + + getAggregationTypeahead({ metric }: PromTypeaheadInput): TypeaheadOutput { + let refresher: Promise = null; + const suggestions: SuggestionGroup[] = []; + const labelKeys = this.state.labelKeys[metric]; + if (labelKeys) { + suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) }); + } else { + refresher = this.fetchMetricLabels(metric); + } + + return { + refresher, + suggestions, + context: 
'context-aggregation', + }; + } + + getLabelTypeahead({ metric, text, wrapperClasses, labelKey }: PromTypeaheadInput): TypeaheadOutput { + let context: string; + let refresher: Promise = null; + const suggestions: SuggestionGroup[] = []; + if (metric) { + const labelKeys = this.state.labelKeys[metric]; + if (labelKeys) { + if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) { + // Label values + if (labelKey) { + const labelValues = this.state.labelValues[metric][labelKey]; + context = 'context-label-values'; + suggestions.push({ + label: 'Label values', + items: labelValues.map(wrapLabel), + }); + } + } else { + // Label keys + context = 'context-labels'; + suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) }); + } + } else { + refresher = this.fetchMetricLabels(metric); + } + } else { + // Metric-independent label queries + const defaultKeys = ['job', 'instance']; + // Munge all keys that we have seen together + const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => { + return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1)); + }, defaultKeys); + if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) { + // Label values + if (labelKey) { + if (this.state.labelValues[EMPTY_METRIC]) { + const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey]; + context = 'context-label-values'; + suggestions.push({ + label: 'Label values', + items: labelValues.map(wrapLabel), + }); + } else { + // Can only query label values for now (API to query keys is under development) + refresher = this.fetchLabelValues(labelKey); + } + } + } else { + // Label keys + context = 'context-labels'; + suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) }); + } + } + return { context, refresher, suggestions }; + } + + request = url => { + if (this.props.request) { + return this.props.request(url); + } + return fetch(url); + }; + + async 
fetchLabelValues(key) { + const url = `/api/v1/label/${key}/values`; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + const pairs = this.state.labelValues[EMPTY_METRIC]; + const values = { + ...pairs, + [key]: body.data, + }; + const labelValues = { + ...this.state.labelValues, + [EMPTY_METRIC]: values, + }; + this.setState({ labelValues }); + } catch (e) { + console.error(e); + } + } + + async fetchMetricLabels(name) { + const url = `/api/v1/series?match[]=${name}`; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + const { keys, values } = processLabels(body.data); + const labelKeys = { + ...this.state.labelKeys, + [name]: keys, + }; + const labelValues = { + ...this.state.labelValues, + [name]: values, + }; + this.setState({ labelKeys, labelValues }); + } catch (e) { + console.error(e); + } + } + + async fetchMetricNames() { + const url = '/api/v1/label/__name__/values'; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + this.setState({ metrics: body.data }, this.onReceiveMetrics); + } catch (error) { + console.error(error); + } + } + + render() { + return ( + + ); + } +} + +export default PromQueryField; diff --git a/public/app/containers/Explore/QueryField.tsx b/public/app/containers/Explore/QueryField.tsx index 41f6d53541c6..60caddcad319 100644 --- a/public/app/containers/Explore/QueryField.tsx +++ b/public/app/containers/Explore/QueryField.tsx @@ -1,106 +1,163 @@ +import _ from 'lodash'; import React from 'react'; import ReactDOM from 'react-dom'; -import { Value } from 'slate'; +import { Block, Change, Document, Text, Value } from 'slate'; import { Editor } from 'slate-react'; import Plain from 'slate-plain-serializer'; -// dom also includes Element polyfills -import { getNextCharacter, getPreviousCousin } from './utils/dom'; import BracesPlugin from './slate-plugins/braces'; import ClearPlugin from 
'./slate-plugins/clear'; import NewlinePlugin from './slate-plugins/newline'; -import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index'; -import RunnerPlugin from './slate-plugins/runner'; -import debounce from './utils/debounce'; -import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus'; import Typeahead from './Typeahead'; -const EMPTY_METRIC = ''; -const METRIC_MARK = 'metric'; export const TYPEAHEAD_DEBOUNCE = 300; -function flattenSuggestions(s) { +function flattenSuggestions(s: any[]): any[] { return s ? s.reduce((acc, g) => acc.concat(g.items), []) : []; } -export const getInitialValue = query => - Value.fromJSON({ - document: { - nodes: [ - { - object: 'block', - type: 'paragraph', - nodes: [ - { - object: 'text', - leaves: [ - { - text: query, - }, - ], - }, - ], - }, - ], - }, +export const makeFragment = (text: string): Document => { + const lines = text.split('\n').map(line => + Block.create({ + type: 'paragraph', + nodes: [Text.create(line)], + }) + ); + + const fragment = Document.create({ + nodes: lines, }); + return fragment; +}; + +export const getInitialValue = (value: string): Value => Value.create({ document: makeFragment(value) }); + +export interface Suggestion { + /** + * The label of this completion item. By default + * this is also the text that is inserted when selecting + * this completion. + */ + label: string; + /** + * The kind of this completion item. Based on the kind + * an icon is chosen by the editor. + */ + kind?: string; + /** + * A human-readable string with additional information + * about this item, like type or symbol information. + */ + detail?: string; + /** + * A human-readable string, can be Markdown, that represents a doc-comment. + */ + documentation?: string; + /** + * A string that should be used when comparing this item + * with other items. When `falsy` the `label` is used. 
+ */ + sortText?: string; + /** + * A string that should be used when filtering a set of + * completion items. When `falsy` the `label` is used. + */ + filterText?: string; + /** + * A string or snippet that should be inserted in a document when selecting + * this completion. When `falsy` the `label` is used. + */ + insertText?: string; + /** + * Delete number of characters before the caret position, + * by default the letters from the beginning of the word. + */ + deleteBackwards?: number; + /** + * Number of steps to move after the insertion, can be negative. + */ + move?: number; +} -class Portal extends React.Component { - node: any; +export interface SuggestionGroup { + /** + * Label that will be displayed for all entries of this group. + */ + label: string; + /** + * List of suggestions of this group. + */ + items: Suggestion[]; + /** + * If true, match only by prefix (and not mid-word). + */ + prefixMatch?: boolean; + /** + * If true, do not filter items in this group based on the search. 
+ */ + skipFilter?: boolean; +} - constructor(props) { - super(props); - const { index = 0, prefix = 'query' } = props; - this.node = document.createElement('div'); - this.node.classList.add(`slate-typeahead`, `slate-typeahead-${prefix}-${index}`); - document.body.appendChild(this.node); - } +interface TypeaheadFieldProps { + additionalPlugins?: any[]; + cleanText?: (text: string) => string; + initialValue: string | null; + onBlur?: () => void; + onFocus?: () => void; + onTypeahead?: (typeahead: TypeaheadInput) => TypeaheadOutput; + onValueChanged?: (value: Value) => void; + onWillApplySuggestion?: (suggestion: string, state: TypeaheadFieldState) => string; + placeholder?: string; + portalPrefix?: string; +} - componentWillUnmount() { - document.body.removeChild(this.node); - } +export interface TypeaheadFieldState { + suggestions: SuggestionGroup[]; + typeaheadContext: string | null; + typeaheadIndex: number; + typeaheadPrefix: string; + typeaheadText: string; + value: Value; +} - render() { - return ReactDOM.createPortal(this.props.children, this.node); - } +export interface TypeaheadInput { + editorNode: Element; + prefix: string; + selection?: Selection; + text: string; + wrapperNode: Element; +} + +export interface TypeaheadOutput { + context?: string; + refresher?: Promise<{}>; + suggestions: SuggestionGroup[]; } -class QueryField extends React.Component { - menuEl: any; - plugins: any; +class QueryField extends React.Component { + menuEl: HTMLElement | null; + plugins: any[]; resetTimer: any; constructor(props, context) { super(props, context); - const { prismDefinition = {}, prismLanguage = 'promql' } = props; - - this.plugins = [ - BracesPlugin(), - ClearPlugin(), - RunnerPlugin({ handler: props.onPressEnter }), - NewlinePlugin(), - PluginPrism({ definition: prismDefinition, language: prismLanguage }), - ]; + // Base plugins + this.plugins = [BracesPlugin(), ClearPlugin(), NewlinePlugin(), ...props.additionalPlugins]; this.state = { - labelKeys: {}, - 
labelValues: {}, - metrics: props.metrics || [], suggestions: [], + typeaheadContext: null, typeaheadIndex: 0, typeaheadPrefix: '', - value: getInitialValue(props.initialQuery || ''), + typeaheadText: '', + value: getInitialValue(props.initialValue || ''), }; } componentDidMount() { this.updateMenu(); - - if (this.props.metrics === undefined) { - this.fetchMetricNames(); - } } componentWillUnmount() { @@ -112,12 +169,9 @@ class QueryField extends React.Component { } componentWillReceiveProps(nextProps) { - if (nextProps.metrics && nextProps.metrics !== this.props.metrics) { - this.setState({ metrics: nextProps.metrics }, this.onMetricsReceived); - } - // initialQuery is null in case the user typed - if (nextProps.initialQuery !== null && nextProps.initialQuery !== this.props.initialQuery) { - this.setState({ value: getInitialValue(nextProps.initialQuery) }); + // initialValue is null in case the user typed + if (nextProps.initialValue !== null && nextProps.initialValue !== this.props.initialValue) { + this.setState({ value: getInitialValue(nextProps.initialValue) }); } } @@ -125,48 +179,28 @@ class QueryField extends React.Component { const changed = value.document !== this.state.value.document; this.setState({ value }, () => { if (changed) { - this.handleChangeQuery(); + this.handleChangeValue(); } }); - window.requestAnimationFrame(this.handleTypeahead); - }; - - onMetricsReceived = () => { - if (!this.state.metrics) { - return; - } - setPrismTokens(this.props.prismLanguage, METRIC_MARK, this.state.metrics); - - // Trigger re-render - window.requestAnimationFrame(() => { - // Bogus edit to trigger highlighting - const change = this.state.value - .change() - .insertText(' ') - .deleteBackward(1); - this.onChange(change); - }); - }; - - request = url => { - if (this.props.request) { - return this.props.request(url); + if (changed) { + window.requestAnimationFrame(this.handleTypeahead); } - return fetch(url); }; - handleChangeQuery = () => { + handleChangeValue = () 
=> { // Send text change to parent - const { onQueryChange } = this.props; - if (onQueryChange) { - onQueryChange(Plain.serialize(this.state.value)); + const { onValueChanged } = this.props; + if (onValueChanged) { + onValueChanged(Plain.serialize(this.state.value)); } }; - handleTypeahead = debounce(() => { + handleTypeahead = _.debounce(async () => { const selection = window.getSelection(); - if (selection.anchorNode) { + const { cleanText, onTypeahead } = this.props; + + if (onTypeahead && selection.anchorNode) { const wrapperNode = selection.anchorNode.parentElement; const editorNode = wrapperNode.closest('.slate-query-field'); if (!editorNode || this.state.value.isBlurred) { @@ -175,164 +209,96 @@ class QueryField extends React.Component { } const range = selection.getRangeAt(0); - const text = selection.anchorNode.textContent; const offset = range.startOffset; - const prefix = cleanText(text.substr(0, offset)); - - // Determine candidates by context - const suggestionGroups = []; - const wrapperClasses = wrapperNode.classList; - let typeaheadContext = null; - - // Take first metric as lucky guess - const metricNode = editorNode.querySelector(`.${METRIC_MARK}`); - - if (wrapperClasses.contains('context-range')) { - // Rate ranges - typeaheadContext = 'context-range'; - suggestionGroups.push({ - label: 'Range vector', - items: [...RATE_RANGES], - }); - } else if (wrapperClasses.contains('context-labels') && metricNode) { - const metric = metricNode.textContent; - const labelKeys = this.state.labelKeys[metric]; - if (labelKeys) { - if ((text && text.startsWith('=')) || wrapperClasses.contains('attr-value')) { - // Label values - const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); - if (labelKeyNode) { - const labelKey = labelKeyNode.textContent; - const labelValues = this.state.labelValues[metric][labelKey]; - typeaheadContext = 'context-label-values'; - suggestionGroups.push({ - label: 'Label values', - items: labelValues, - }); - } - } else { - 
// Label keys - typeaheadContext = 'context-labels'; - suggestionGroups.push({ label: 'Labels', items: labelKeys }); - } - } else { - this.fetchMetricLabels(metric); - } - } else if (wrapperClasses.contains('context-labels') && !metricNode) { - // Empty name queries - const defaultKeys = ['job', 'instance']; - // Munge all keys that we have seen together - const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => { - return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1)); - }, defaultKeys); - if ((text && text.startsWith('=')) || wrapperClasses.contains('attr-value')) { - // Label values - const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); - if (labelKeyNode) { - const labelKey = labelKeyNode.textContent; - if (this.state.labelValues[EMPTY_METRIC]) { - const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey]; - typeaheadContext = 'context-label-values'; - suggestionGroups.push({ - label: 'Label values', - items: labelValues, - }); - } else { - // Can only query label values for now (API to query keys is under development) - this.fetchLabelValues(labelKey); - } - } - } else { - // Label keys - typeaheadContext = 'context-labels'; - suggestionGroups.push({ label: 'Labels', items: labelKeys }); - } - } else if (metricNode && wrapperClasses.contains('context-aggregation')) { - typeaheadContext = 'context-aggregation'; - const metric = metricNode.textContent; - const labelKeys = this.state.labelKeys[metric]; - if (labelKeys) { - suggestionGroups.push({ label: 'Labels', items: labelKeys }); - } else { - this.fetchMetricLabels(metric); - } - } else if ( - (this.state.metrics && ((prefix && !wrapperClasses.contains('token')) || text.match(/[+\-*/^%]/))) || - wrapperClasses.contains('context-function') - ) { - // Need prefix for metrics - typeaheadContext = 'context-metrics'; - suggestionGroups.push({ - label: 'Metrics', - items: this.state.metrics, - }); + const text = 
selection.anchorNode.textContent; + let prefix = text.substr(0, offset); + if (cleanText) { + prefix = cleanText(prefix); } - let results = 0; - const filteredSuggestions = suggestionGroups.map(group => { - if (group.items) { - group.items = group.items.filter(c => c.length !== prefix.length && c.indexOf(prefix) > -1); - results += group.items.length; - } - return group; + const { suggestions, context, refresher } = onTypeahead({ + editorNode, + prefix, + selection, + text, + wrapperNode, }); - console.log('handleTypeahead', selection.anchorNode, wrapperClasses, text, offset, prefix, typeaheadContext); - - this.setState({ - typeaheadPrefix: prefix, - typeaheadContext, - typeaheadText: text, - suggestions: results > 0 ? filteredSuggestions : [], - }); - } - }, TYPEAHEAD_DEBOUNCE); + const filteredSuggestions = suggestions + .map(group => { + if (group.items) { + if (prefix) { + // Filter groups based on prefix + if (!group.skipFilter) { + group.items = group.items.filter(c => (c.filterText || c.label).length >= prefix.length); + if (group.prefixMatch) { + group.items = group.items.filter(c => (c.filterText || c.label).indexOf(prefix) === 0); + } else { + group.items = group.items.filter(c => (c.filterText || c.label).indexOf(prefix) > -1); + } + } + // Filter out the already typed value (prefix) unless it inserts custom text + group.items = group.items.filter(c => c.insertText || (c.filterText || c.label) !== prefix); + } - applyTypeahead(change, suggestion) { - const { typeaheadPrefix, typeaheadContext, typeaheadText } = this.state; + group.items = _.sortBy(group.items, item => item.sortText || item.label); + } + return group; + }) + .filter(group => group.items && group.items.length > 0); // Filter out empty groups - // Modify suggestion based on context - switch (typeaheadContext) { - case 'context-labels': { - const nextChar = getNextCharacter(); - if (!nextChar || nextChar === '}' || nextChar === ',') { - suggestion += '='; + this.setState( + { + suggestions: 
filteredSuggestions, + typeaheadPrefix: prefix, + typeaheadContext: context, + typeaheadText: text, + }, + () => { + if (refresher) { + refresher.then(this.handleTypeahead).catch(e => console.error(e)); + } } - break; - } + ); + } + }, TYPEAHEAD_DEBOUNCE); - case 'context-label-values': { - // Always add quotes and remove existing ones instead - if (!(typeaheadText.startsWith('="') || typeaheadText.startsWith('"'))) { - suggestion = `"${suggestion}`; - } - if (getNextCharacter() !== '"') { - suggestion = `${suggestion}"`; - } - break; - } + applyTypeahead(change: Change, suggestion: Suggestion): Change { + const { cleanText, onWillApplySuggestion } = this.props; + const { typeaheadPrefix, typeaheadText } = this.state; + let suggestionText = suggestion.insertText || suggestion.label; + const move = suggestion.move || 0; - default: + if (onWillApplySuggestion) { + suggestionText = onWillApplySuggestion(suggestionText, { ...this.state }); } this.resetTypeahead(); // Remove the current, incomplete text and replace it with the selected suggestion - let backward = typeaheadPrefix.length; - const text = cleanText(typeaheadText); + const backward = suggestion.deleteBackwards || typeaheadPrefix.length; + const text = cleanText ? cleanText(typeaheadText) : typeaheadText; const suffixLength = text.length - typeaheadPrefix.length; const offset = typeaheadText.indexOf(typeaheadPrefix); - const midWord = typeaheadPrefix && ((suffixLength > 0 && offset > -1) || suggestion === typeaheadText); + const midWord = typeaheadPrefix && ((suffixLength > 0 && offset > -1) || suggestionText === typeaheadText); const forward = midWord ? 
suffixLength + offset : 0; - return ( - change - // TODO this line breaks if cursor was moved left and length is longer than whole prefix + // If new-lines, apply suggestion as block + if (suggestionText.match(/\n/)) { + const fragment = makeFragment(suggestionText); + return change .deleteBackward(backward) .deleteForward(forward) - .insertText(suggestion) - .focus() - ); + .insertFragment(fragment) + .focus(); + } + + return change + .deleteBackward(backward) + .deleteForward(forward) + .insertText(suggestionText) + .move(move) + .focus(); } onKeyDown = (event, change) => { @@ -413,74 +379,6 @@ class QueryField extends React.Component { }); }; - async fetchLabelValues(key) { - const url = `/api/v1/label/${key}/values`; - try { - const res = await this.request(url); - console.log(res); - const body = await (res.data || res.json()); - const pairs = this.state.labelValues[EMPTY_METRIC]; - const values = { - ...pairs, - [key]: body.data, - }; - // const labelKeys = { - // ...this.state.labelKeys, - // [EMPTY_METRIC]: keys, - // }; - const labelValues = { - ...this.state.labelValues, - [EMPTY_METRIC]: values, - }; - this.setState({ labelValues }, this.handleTypeahead); - } catch (e) { - if (this.props.onRequestError) { - this.props.onRequestError(e); - } else { - console.error(e); - } - } - } - - async fetchMetricLabels(name) { - const url = `/api/v1/series?match[]=${name}`; - try { - const res = await this.request(url); - const body = await (res.data || res.json()); - const { keys, values } = processLabels(body.data); - const labelKeys = { - ...this.state.labelKeys, - [name]: keys, - }; - const labelValues = { - ...this.state.labelValues, - [name]: values, - }; - this.setState({ labelKeys, labelValues }, this.handleTypeahead); - } catch (e) { - if (this.props.onRequestError) { - this.props.onRequestError(e); - } else { - console.error(e); - } - } - } - - async fetchMetricNames() { - const url = '/api/v1/label/__name__/values'; - try { - const res = await 
this.request(url); - const body = await (res.data || res.json()); - this.setState({ metrics: body.data }, this.onMetricsReceived); - } catch (error) { - if (this.props.onRequestError) { - this.props.onRequestError(error); - } else { - console.error(error); - } - } - } - handleBlur = () => { const { onBlur } = this.props; // If we dont wait here, menu clicks wont work because the menu @@ -498,7 +396,7 @@ class QueryField extends React.Component { } }; - handleClickMenu = item => { + onClickMenu = (item: Suggestion) => { // Manually triggering change const change = this.applyTypeahead(this.state.value.change(), item); this.onChange(change); @@ -531,7 +429,7 @@ class QueryField extends React.Component { // Write DOM requestAnimationFrame(() => { - menu.style.opacity = 1; + menu.style.opacity = '1'; menu.style.top = `${rect.top + scrollY + rect.height + 4}px`; menu.style.left = `${rect.left + scrollX - 2}px`; }); @@ -554,17 +452,16 @@ class QueryField extends React.Component { let selectedIndex = Math.max(this.state.typeaheadIndex, 0); const flattenedSuggestions = flattenSuggestions(suggestions); selectedIndex = selectedIndex % flattenedSuggestions.length || 0; - const selectedKeys = (flattenedSuggestions.length > 0 ? [flattenedSuggestions[selectedIndex]] : []).map( - i => (typeof i === 'object' ? i.text : i) - ); + const selectedItem: Suggestion | null = + flattenedSuggestions.length > 0 ? 
flattenedSuggestions[selectedIndex] : null; // Create typeahead in DOM root so we can later position it absolutely return ( @@ -591,4 +488,24 @@ class QueryField extends React.Component { } } +class Portal extends React.Component<{ index?: number; prefix: string }, {}> { + node: HTMLElement; + + constructor(props) { + super(props); + const { index = 0, prefix = 'query' } = props; + this.node = document.createElement('div'); + this.node.classList.add(`slate-typeahead`, `slate-typeahead-${prefix}-${index}`); + document.body.appendChild(this.node); + } + + componentWillUnmount() { + document.body.removeChild(this.node); + } + + render() { + return ReactDOM.createPortal(this.props.children, this.node); + } +} + export default QueryField; diff --git a/public/app/containers/Explore/QueryRows.tsx b/public/app/containers/Explore/QueryRows.tsx index a968e1e2c641..3aaa006d6df1 100644 --- a/public/app/containers/Explore/QueryRows.tsx +++ b/public/app/containers/Explore/QueryRows.tsx @@ -1,7 +1,6 @@ import React, { PureComponent } from 'react'; -import promql from './slate-plugins/prism/promql'; -import QueryField from './QueryField'; +import QueryField from './PromQueryField'; class QueryRow extends PureComponent { constructor(props) { @@ -62,9 +61,6 @@ class QueryRow extends PureComponent { portalPrefix="explore" onPressEnter={this.handlePressEnter} onQueryChange={this.handleChangeQuery} - placeholder="Enter a PromQL query" - prismLanguage="promql" - prismDefinition={promql} request={request} />
    diff --git a/public/app/containers/Explore/Typeahead.tsx b/public/app/containers/Explore/Typeahead.tsx index 44fce7f8c7eb..9924488035c9 100644 --- a/public/app/containers/Explore/Typeahead.tsx +++ b/public/app/containers/Explore/Typeahead.tsx @@ -1,17 +1,26 @@ import React from 'react'; -function scrollIntoView(el) { +import { Suggestion, SuggestionGroup } from './QueryField'; + +function scrollIntoView(el: HTMLElement) { if (!el || !el.offsetParent) { return; } - const container = el.offsetParent; + const container = el.offsetParent as HTMLElement; if (el.offsetTop > container.scrollTop + container.offsetHeight || el.offsetTop < container.scrollTop) { container.scrollTop = el.offsetTop - container.offsetTop; } } -class TypeaheadItem extends React.PureComponent { - el: any; +interface TypeaheadItemProps { + isSelected: boolean; + item: Suggestion; + onClickItem: (Suggestion) => void; +} + +class TypeaheadItem extends React.PureComponent { + el: HTMLElement; + componentDidUpdate(prevProps) { if (this.props.isSelected && !prevProps.isSelected) { scrollIntoView(this.el); @@ -22,20 +31,30 @@ class TypeaheadItem extends React.PureComponent { this.el = el; }; + onClick = () => { + this.props.onClickItem(this.props.item); + }; + render() { - const { hint, isSelected, label, onClickItem } = this.props; + const { isSelected, item } = this.props; const className = isSelected ? 'typeahead-item typeahead-item__selected' : 'typeahead-item'; - const onClick = () => onClickItem(label); return ( -
  • - {label} - {hint && isSelected ?
    {hint}
    : null} +
  • + {item.detail || item.label} + {item.documentation && isSelected ?
    {item.documentation}
    : null}
  • ); } } -class TypeaheadGroup extends React.PureComponent { +interface TypeaheadGroupProps { + items: Suggestion[]; + label: string; + onClickItem: (Suggestion) => void; + selected: Suggestion; +} + +class TypeaheadGroup extends React.PureComponent { render() { const { items, label, selected, onClickItem } = this.props; return ( @@ -43,16 +62,8 @@ class TypeaheadGroup extends React.PureComponent {
    {label}
      {items.map(item => { - const text = typeof item === 'object' ? item.text : item; - const label = typeof item === 'object' ? item.display || item.text : item; return ( - -1} - hint={item.hint} - label={label} - /> + ); })}
    @@ -61,13 +72,19 @@ class TypeaheadGroup extends React.PureComponent { } } -class Typeahead extends React.PureComponent { +interface TypeaheadProps { + groupedItems: SuggestionGroup[]; + menuRef: any; + selectedItem: Suggestion | null; + onClickItem: (Suggestion) => void; +} +class Typeahead extends React.PureComponent { render() { - const { groupedItems, menuRef, selectedItems, onClickItem } = this.props; + const { groupedItems, menuRef, selectedItem, onClickItem } = this.props; return (
      {groupedItems.map(g => ( - + ))}
    ); diff --git a/public/app/containers/Explore/slate-plugins/prism/promql.ts b/public/app/containers/Explore/slate-plugins/prism/promql.ts index 0f0be18cb6fe..a17c5fbc4f6c 100644 --- a/public/app/containers/Explore/slate-plugins/prism/promql.ts +++ b/public/app/containers/Explore/slate-plugins/prism/promql.ts @@ -1,67 +1,368 @@ +/* tslint:disable max-line-length */ + export const OPERATORS = ['by', 'group_left', 'group_right', 'ignoring', 'on', 'offset', 'without']; const AGGREGATION_OPERATORS = [ - 'sum', - 'min', - 'max', - 'avg', - 'stddev', - 'stdvar', - 'count', - 'count_values', - 'bottomk', - 'topk', - 'quantile', + { + label: 'sum', + insertText: 'sum()', + documentation: 'Calculate sum over dimensions', + }, + { + label: 'min', + insertText: 'min()', + documentation: 'Select minimum over dimensions', + }, + { + label: 'max', + insertText: 'max()', + documentation: 'Select maximum over dimensions', + }, + { + label: 'avg', + insertText: 'avg()', + documentation: 'Calculate the average over dimensions', + }, + { + label: 'stddev', + insertText: 'stddev()', + documentation: 'Calculate population standard deviation over dimensions', + }, + { + label: 'stdvar', + insertText: 'stdvar()', + documentation: 'Calculate population standard variance over dimensions', + }, + { + label: 'count', + insertText: 'count()', + documentation: 'Count number of elements in the vector', + }, + { + label: 'count_values', + insertText: 'count_values()', + documentation: 'Count number of elements with the same value', + }, + { + label: 'bottomk', + insertText: 'bottomk()', + documentation: 'Smallest k elements by sample value', + }, + { + label: 'topk', + insertText: 'topk()', + documentation: 'Largest k elements by sample value', + }, + { + label: 'quantile', + insertText: 'quantile()', + documentation: 'Calculate φ-quantile (0 ≤ φ ≤ 1) over dimensions', + }, ]; export const FUNCTIONS = [ ...AGGREGATION_OPERATORS, - 'abs', - 'absent', - 'ceil', - 'changes', - 'clamp_max', - 
'clamp_min', - 'count_scalar', - 'day_of_month', - 'day_of_week', - 'days_in_month', - 'delta', - 'deriv', - 'drop_common_labels', - 'exp', - 'floor', - 'histogram_quantile', - 'holt_winters', - 'hour', - 'idelta', - 'increase', - 'irate', - 'label_replace', - 'ln', - 'log2', - 'log10', - 'minute', - 'month', - 'predict_linear', - 'rate', - 'resets', - 'round', - 'scalar', - 'sort', - 'sort_desc', - 'sqrt', - 'time', - 'vector', - 'year', - 'avg_over_time', - 'min_over_time', - 'max_over_time', - 'sum_over_time', - 'count_over_time', - 'quantile_over_time', - 'stddev_over_time', - 'stdvar_over_time', + { + insertText: 'abs()', + label: 'abs', + detail: 'abs(v instant-vector)', + documentation: 'Returns the input vector with all sample values converted to their absolute value.', + }, + { + insertText: 'absent()', + label: 'absent', + detail: 'absent(v instant-vector)', + documentation: + 'Returns an empty vector if the vector passed to it has any elements and a 1-element vector with the value 1 if the vector passed to it has no elements. 
This is useful for alerting on when no time series exist for a given metric name and label combination.', + }, + { + insertText: 'ceil()', + label: 'ceil', + detail: 'ceil(v instant-vector)', + documentation: 'Rounds the sample values of all elements in `v` up to the nearest integer.', + }, + { + insertText: 'changes()', + label: 'changes', + detail: 'changes(v range-vector)', + documentation: + 'For each input time series, `changes(v range-vector)` returns the number of times its value has changed within the provided time range as an instant vector.', + }, + { + insertText: 'clamp_max()', + label: 'clamp_max', + detail: 'clamp_max(v instant-vector, max scalar)', + documentation: 'Clamps the sample values of all elements in `v` to have an upper limit of `max`.', + }, + { + insertText: 'clamp_min()', + label: 'clamp_min', + detail: 'clamp_min(v instant-vector, min scalar)', + documentation: 'Clamps the sample values of all elements in `v` to have a lower limit of `min`.', + }, + { + insertText: 'count_scalar()', + label: 'count_scalar', + detail: 'count_scalar(v instant-vector)', + documentation: + 'Returns the number of elements in a time series vector as a scalar. This is in contrast to the `count()` aggregation operator, which always returns a vector (an empty one if the input vector is empty) and allows grouping by labels via a `by` clause.', + }, + { + insertText: 'day_of_month()', + label: 'day_of_month', + detail: 'day_of_month(v=vector(time()) instant-vector)', + documentation: 'Returns the day of the month for each of the given times in UTC. Returned values are from 1 to 31.', + }, + { + insertText: 'day_of_week()', + label: 'day_of_week', + detail: 'day_of_week(v=vector(time()) instant-vector)', + documentation: + 'Returns the day of the week for each of the given times in UTC. 
Returned values are from 0 to 6, where 0 means Sunday etc.', + }, + { + insertText: 'days_in_month()', + label: 'days_in_month', + detail: 'days_in_month(v=vector(time()) instant-vector)', + documentation: + 'Returns number of days in the month for each of the given times in UTC. Returned values are from 28 to 31.', + }, + { + insertText: 'delta()', + label: 'delta', + detail: 'delta(v range-vector)', + documentation: + 'Calculates the difference between the first and last value of each time series element in a range vector `v`, returning an instant vector with the given deltas and equivalent labels. The delta is extrapolated to cover the full time range as specified in the range vector selector, so that it is possible to get a non-integer result even if the sample values are all integers.', + }, + { + insertText: 'deriv()', + label: 'deriv', + detail: 'deriv(v range-vector)', + documentation: + 'Calculates the per-second derivative of the time series in a range vector `v`, using simple linear regression.', + }, + { + insertText: 'drop_common_labels()', + label: 'drop_common_labels', + detail: 'drop_common_labels(instant-vector)', + documentation: 'Drops all labels that have the same name and value across all series in the input vector.', + }, + { + insertText: 'exp()', + label: 'exp', + detail: 'exp(v instant-vector)', + documentation: + 'Calculates the exponential function for all elements in `v`.\nSpecial cases are:\n* `Exp(+Inf) = +Inf` \n* `Exp(NaN) = NaN`', + }, + { + insertText: 'floor()', + label: 'floor', + detail: 'floor(v instant-vector)', + documentation: 'Rounds the sample values of all elements in `v` down to the nearest integer.', + }, + { + insertText: 'histogram_quantile()', + label: 'histogram_quantile', + detail: 'histogram_quantile(φ float, b instant-vector)', + documentation: + 'Calculates the φ-quantile (0 ≤ φ ≤ 1) from the buckets `b` of a histogram. The samples in `b` are the counts of observations in each bucket. 
Each sample must have a label `le` where the label value denotes the inclusive upper bound of the bucket. (Samples without such a label are silently ignored.) The histogram metric type automatically provides time series with the `_bucket` suffix and the appropriate labels.', + }, + { + insertText: 'holt_winters()', + label: 'holt_winters', + detail: 'holt_winters(v range-vector, sf scalar, tf scalar)', + documentation: + 'Produces a smoothed value for time series based on the range in `v`. The lower the smoothing factor `sf`, the more importance is given to old data. The higher the trend factor `tf`, the more trends in the data is considered. Both `sf` and `tf` must be between 0 and 1.', + }, + { + insertText: 'hour()', + label: 'hour', + detail: 'hour(v=vector(time()) instant-vector)', + documentation: 'Returns the hour of the day for each of the given times in UTC. Returned values are from 0 to 23.', + }, + { + insertText: 'idelta()', + label: 'idelta', + detail: 'idelta(v range-vector)', + documentation: + 'Calculates the difference between the last two samples in the range vector `v`, returning an instant vector with the given deltas and equivalent labels.', + }, + { + insertText: 'increase()', + label: 'increase', + detail: 'increase(v range-vector)', + documentation: + 'Calculates the increase in the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. The increase is extrapolated to cover the full time range as specified in the range vector selector, so that it is possible to get a non-integer result even if a counter increases only by integer increments.', + }, + { + insertText: 'irate()', + label: 'irate', + detail: 'irate(v range-vector)', + documentation: + 'Calculates the per-second instant rate of increase of the time series in the range vector. This is based on the last two data points. 
Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for.', + }, + { + insertText: 'label_replace()', + label: 'label_replace', + detail: 'label_replace(v instant-vector, dst_label string, replacement string, src_label string, regex string)', + documentation: + "For each timeseries in `v`, `label_replace(v instant-vector, dst_label string, replacement string, src_label string, regex string)` matches the regular expression `regex` against the label `src_label`. If it matches, then the timeseries is returned with the label `dst_label` replaced by the expansion of `replacement`. `$1` is replaced with the first matching subgroup, `$2` with the second etc. If the regular expression doesn't match then the timeseries is returned unchanged.", + }, + { + insertText: 'ln()', + label: 'ln', + detail: 'ln(v instant-vector)', + documentation: + 'calculates the natural logarithm for all elements in `v`.\nSpecial cases are:\n * `ln(+Inf) = +Inf`\n * `ln(0) = -Inf`\n * `ln(x < 0) = NaN`\n * `ln(NaN) = NaN`', + }, + { + insertText: 'log2()', + label: 'log2', + detail: 'log2(v instant-vector)', + documentation: + 'Calculates the binary logarithm for all elements in `v`. The special cases are equivalent to those in `ln`.', + }, + { + insertText: 'log10()', + label: 'log10', + detail: 'log10(v instant-vector)', + documentation: + 'Calculates the decimal logarithm for all elements in `v`. The special cases are equivalent to those in `ln`.', + }, + { + insertText: 'minute()', + label: 'minute', + detail: 'minute(v=vector(time()) instant-vector)', + documentation: + 'Returns the minute of the hour for each of the given times in UTC. Returned values are from 0 to 59.', + }, + { + insertText: 'month()', + label: 'month', + detail: 'month(v=vector(time()) instant-vector)', + documentation: + 'Returns the month of the year for each of the given times in UTC. 
Returned values are from 1 to 12, where 1 means January etc.', + }, + { + insertText: 'predict_linear()', + label: 'predict_linear', + detail: 'predict_linear(v range-vector, t scalar)', + documentation: + 'Predicts the value of time series `t` seconds from now, based on the range vector `v`, using simple linear regression.', + }, + { + insertText: 'rate()', + label: 'rate', + detail: 'rate(v range-vector)', + documentation: + "Calculates the per-second average rate of increase of the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for. Also, the calculation extrapolates to the ends of the time range, allowing for missed scrapes or imperfect alignment of scrape cycles with the range's time period.", + }, + { + insertText: 'resets()', + label: 'resets', + detail: 'resets(v range-vector)', + documentation: + 'For each input time series, `resets(v range-vector)` returns the number of counter resets within the provided time range as an instant vector. Any decrease in the value between two consecutive samples is interpreted as a counter reset.', + }, + { + insertText: 'round()', + label: 'round', + detail: 'round(v instant-vector, to_nearest=1 scalar)', + documentation: + 'Rounds the sample values of all elements in `v` to the nearest integer. Ties are resolved by rounding up. The optional `to_nearest` argument allows specifying the nearest multiple to which the sample values should be rounded. This multiple may also be a fraction.', + }, + { + insertText: 'scalar()', + label: 'scalar', + detail: 'scalar(v instant-vector)', + documentation: + 'Given a single-element input vector, `scalar(v instant-vector)` returns the sample value of that single element as a scalar. 
If the input vector does not have exactly one element, `scalar` will return `NaN`.', + }, + { + insertText: 'sort()', + label: 'sort', + detail: 'sort(v instant-vector)', + documentation: 'Returns vector elements sorted by their sample values, in ascending order.', + }, + { + insertText: 'sort_desc()', + label: 'sort_desc', + detail: 'sort_desc(v instant-vector)', + documentation: 'Returns vector elements sorted by their sample values, in descending order.', + }, + { + insertText: 'sqrt()', + label: 'sqrt', + detail: 'sqrt(v instant-vector)', + documentation: 'Calculates the square root of all elements in `v`.', + }, + { + insertText: 'time()', + label: 'time', + detail: 'time()', + documentation: + 'Returns the number of seconds since January 1, 1970 UTC. Note that this does not actually return the current time, but the time at which the expression is to be evaluated.', + }, + { + insertText: 'vector()', + label: 'vector', + detail: 'vector(s scalar)', + documentation: 'Returns the scalar `s` as a vector with no labels.', + }, + { + insertText: 'year()', + label: 'year', + detail: 'year(v=vector(time()) instant-vector)', + documentation: 'Returns the year for each of the given times in UTC.', + }, + { + insertText: 'avg_over_time()', + label: 'avg_over_time', + detail: 'avg_over_time(range-vector)', + documentation: 'The average value of all points in the specified interval.', + }, + { + insertText: 'min_over_time()', + label: 'min_over_time', + detail: 'min_over_time(range-vector)', + documentation: 'The minimum value of all points in the specified interval.', + }, + { + insertText: 'max_over_time()', + label: 'max_over_time', + detail: 'max_over_time(range-vector)', + documentation: 'The maximum value of all points in the specified interval.', + }, + { + insertText: 'sum_over_time()', + label: 'sum_over_time', + detail: 'sum_over_time(range-vector)', + documentation: 'The sum of all values in the specified interval.', + }, + { + insertText: 'count_over_time()', 
+ label: 'count_over_time', + detail: 'count_over_time(range-vector)', + documentation: 'The count of all values in the specified interval.', + }, + { + insertText: 'quantile_over_time()', + label: 'quantile_over_time', + detail: 'quantile_over_time(scalar, range-vector)', + documentation: 'The φ-quantile (0 ≤ φ ≤ 1) of the values in the specified interval.', + }, + { + insertText: 'stddev_over_time()', + label: 'stddev_over_time', + detail: 'stddev_over_time(range-vector)', + documentation: 'The population standard deviation of the values in the specified interval.', + }, + { + insertText: 'stdvar_over_time()', + label: 'stdvar_over_time', + detail: 'stdvar_over_time(range-vector)', + documentation: 'The population standard variance of the values in the specified interval.', + }, ]; const tokenizer = { @@ -93,7 +394,7 @@ const tokenizer = { }, }, }, - function: new RegExp(`\\b(?:${FUNCTIONS.join('|')})(?=\\s*\\()`, 'i'), + function: new RegExp(`\\b(?:${FUNCTIONS.map(f => f.label).join('|')})(?=\\s*\\()`, 'i'), 'context-range': [ { pattern: /\[[^\]]*(?=])/, // [1m] diff --git a/public/sass/components/_slate_editor.scss b/public/sass/components/_slate_editor.scss index 119c468292a4..10b2238f4b8d 100644 --- a/public/sass/components/_slate_editor.scss +++ b/public/sass/components/_slate_editor.scss @@ -71,6 +71,7 @@ .typeahead-item-hint { font-size: $font-size-xs; color: $text-color; + white-space: normal; } } } From fc06f8bfe71d758148708dee23c52af678935a52 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Thu, 26 Jul 2018 17:22:15 +0200 Subject: [PATCH 209/263] Pass more tests --- public/app/plugins/panel/singlestat/module.ts | 1 + .../panel/singlestat/specs/singlestat.jest.ts | 34 ++++++++----------- 2 files changed, 15 insertions(+), 20 deletions(-) diff --git a/public/app/plugins/panel/singlestat/module.ts b/public/app/plugins/panel/singlestat/module.ts index ebd2628b0864..7fafb5902d13 100644 --- a/public/app/plugins/panel/singlestat/module.ts +++ 
b/public/app/plugins/panel/singlestat/module.ts @@ -310,6 +310,7 @@ class SingleStatCtrl extends MetricsPanelCtrl { data.valueRounded = data.value; data.valueFormatted = formatFunc(data.value, this.dashboard.isTimezoneUtc()); } else { + console.log(lastPoint, lastValue); data.value = this.series[0].stats[this.panel.valueName]; data.flotpairs = this.series[0].flotpairs; diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 2c945aa6eb23..7b89f86250c4 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -7,7 +7,7 @@ import moment from 'moment'; describe('SingleStatCtrl', function() { let ctx = {}; let epoch = 1505826363746; - let clock; + Date.now = () => epoch; let $scope = { $on: () => {}, @@ -24,7 +24,7 @@ describe('SingleStatCtrl', function() { }, }; SingleStatCtrl.prototype.dashboard = { - isTimezoneUtc: () => {}, + isTimezoneUtc: jest.fn(() => true), }; function singleStatScenario(desc, func) { @@ -89,29 +89,30 @@ describe('SingleStatCtrl', function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsIso'; + ctx.ctrl.dashboard.isTimezoneUtc = () => false; }); it('Should use time instead of value', function() { - console.log(ctx.data.value); expect(ctx.data.value).toBe(1505634997920); expect(ctx.data.valueRounded).toBe(1505634997920); }); it('should set formatted value', function() { - expect(ctx.data.valueFormatted).toBe(moment(1505634997920).format('YYYY-MM-DD HH:mm:ss')); + expect(ctx.data.valueFormatted).toBe('2017-09-17 09:56:37'); }); }); singleStatScenario('showing last iso time instead of value (in UTC)', function(ctx) { ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.data = [{ target: 'test.cpu1', 
datapoints: [[10, 12], [20, 5000]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsIso'; // ctx.setIsUtc(true); + ctx.ctrl.dashboard.isTimezoneUtc = () => true; }); - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).toBe(moment.utc(1505634997920).format('YYYY-MM-DD HH:mm:ss')); + it('should set value', function() { + expect(ctx.data.valueFormatted).toBe('1970-01-01 00:00:05'); }); }); @@ -120,6 +121,7 @@ describe('SingleStatCtrl', function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsUS'; + ctx.ctrl.dashboard.isTimezoneUtc = () => false; }); it('Should use time instead of value', function() { @@ -134,21 +136,22 @@ describe('SingleStatCtrl', function() { singleStatScenario('showing last us time instead of value (in UTC)', function(ctx) { ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 5000]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsUS'; // ctx.setIsUtc(true); + ctx.ctrl.dashboard.isTimezoneUtc = () => true; }); it('should set formatted value', function() { - expect(ctx.data.valueFormatted).toBe(moment.utc(1505634997920).format('MM/DD/YYYY h:mm:ss a')); + expect(ctx.data.valueFormatted).toBe('01/01/1970 12:00:05 am'); }); }); singleStatScenario('showing last time from now instead of value', function(ctx) { beforeEach(() => { // clock = sinon.useFakeTimers(epoch); - jest.useFakeTimers(); + //jest.useFakeTimers(); }); ctx.setup(function() { @@ -167,16 +170,11 @@ describe('SingleStatCtrl', function() { }); afterEach(() => { - jest.clearAllTimers(); + // jest.clearAllTimers(); }); }); singleStatScenario('showing last time from now instead of value (in UTC)', function(ctx) { - beforeEach(() => { - // clock = sinon.useFakeTimers(epoch); 
- jest.useFakeTimers(); - }); - ctx.setup(function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; ctx.ctrl.panel.valueName = 'last_time'; @@ -187,10 +185,6 @@ describe('SingleStatCtrl', function() { it('should set formatted value', function() { expect(ctx.data.valueFormatted).toBe('2 days ago'); }); - - afterEach(() => { - jest.clearAllTimers(); - }); }); singleStatScenario('MainValue should use same number for decimals as displayed when checking thresholds', function( From d42cea5d42c58175448986a8682b7a8c137be088 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 18:09:42 +0200 Subject: [PATCH 210/263] refactor sql engine to make it hold all common code for sql datasources --- pkg/tsdb/sql_engine.go | 324 +++++++++++++++++++++++++++++++++++------ 1 file changed, 279 insertions(+), 45 deletions(-) diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index ec908aeb9de8..9321e8912dc5 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -1,11 +1,17 @@ package tsdb import ( + "container/list" "context" + "database/sql" "fmt" + "math" + "strings" "sync" "time" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/components/null" "github.com/go-xorm/core" @@ -14,27 +20,15 @@ import ( "github.com/grafana/grafana/pkg/models" ) -// SqlEngine is a wrapper class around xorm for relational database data sources. -type SqlEngine interface { - InitEngine(driverName string, dsInfo *models.DataSource, cnnstr string) error - Query( - ctx context.Context, - ds *models.DataSource, - query *TsdbQuery, - transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, - transformToTable func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, - ) (*Response, error) -} - // SqlMacroEngine interpolates macros into sql. 
It takes in the Query to have access to query context and // timeRange to be able to generate queries that use from and to. type SqlMacroEngine interface { Interpolate(query *Query, timeRange *TimeRange, sql string) (string, error) } -type DefaultSqlEngine struct { - MacroEngine SqlMacroEngine - XormEngine *xorm.Engine +// SqlTableRowTransformer transforms a query result row to RowValues with proper types. +type SqlTableRowTransformer interface { + Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (RowValues, error) } type engineCacheType struct { @@ -48,69 +42,92 @@ var engineCache = engineCacheType{ versions: make(map[int64]int), } -// InitEngine creates the db connection and inits the xorm engine or loads it from the engine cache -func (e *DefaultSqlEngine) InitEngine(driverName string, dsInfo *models.DataSource, cnnstr string) error { +var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) { + return xorm.NewEngine(driverName, connectionString) +} + +type sqlQueryEndpoint struct { + macroEngine SqlMacroEngine + rowTransformer SqlTableRowTransformer + engine *xorm.Engine + timeColumnNames []string + metricColumnTypes []string + log log.Logger +} + +type SqlQueryEndpointConfiguration struct { + DriverName string + Datasource *models.DataSource + ConnectionString string + TimeColumnNames []string + MetricColumnTypes []string +} + +var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransformer SqlTableRowTransformer, macroEngine SqlMacroEngine, log log.Logger) (TsdbQueryEndpoint, error) { + queryEndpoint := sqlQueryEndpoint{ + rowTransformer: rowTransformer, + macroEngine: macroEngine, + timeColumnNames: []string{"time"}, + log: log, + } + + if len(config.TimeColumnNames) > 0 { + queryEndpoint.timeColumnNames = config.TimeColumnNames + } + engineCache.Lock() defer engineCache.Unlock() - if engine, present := engineCache.cache[dsInfo.Id]; present { - if version := engineCache.versions[dsInfo.Id]; 
version == dsInfo.Version { - e.XormEngine = engine - return nil + if engine, present := engineCache.cache[config.Datasource.Id]; present { + if version := engineCache.versions[config.Datasource.Id]; version == config.Datasource.Version { + queryEndpoint.engine = engine + return &queryEndpoint, nil } } - engine, err := xorm.NewEngine(driverName, cnnstr) + engine, err := NewXormEngine(config.DriverName, config.ConnectionString) if err != nil { - return err + return nil, err } engine.SetMaxOpenConns(10) engine.SetMaxIdleConns(10) - engineCache.versions[dsInfo.Id] = dsInfo.Version - engineCache.cache[dsInfo.Id] = engine - e.XormEngine = engine + engineCache.versions[config.Datasource.Id] = config.Datasource.Version + engineCache.cache[config.Datasource.Id] = engine + queryEndpoint.engine = engine - return nil + return &queryEndpoint, nil } -// Query is a default implementation of the Query method for an SQL data source. -// The caller of this function must implement transformToTimeSeries and transformToTable and -// pass them in as parameters. 
-func (e *DefaultSqlEngine) Query( - ctx context.Context, - dsInfo *models.DataSource, - tsdbQuery *TsdbQuery, - transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, - transformToTable func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, -) (*Response, error) { +// Query is the main function for the SqlQueryEndpoint +func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *TsdbQuery) (*Response, error) { result := &Response{ Results: make(map[string]*QueryResult), } - session := e.XormEngine.NewSession() + session := e.engine.NewSession() defer session.Close() db := session.DB() for _, query := range tsdbQuery.Queries { - rawSql := query.Model.Get("rawSql").MustString() - if rawSql == "" { + rawSQL := query.Model.Get("rawSql").MustString() + if rawSQL == "" { continue } queryResult := &QueryResult{Meta: simplejson.New(), RefId: query.RefId} result.Results[query.RefId] = queryResult - rawSql, err := e.MacroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSql) + rawSQL, err := e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL) if err != nil { queryResult.Error = err continue } - queryResult.Meta.Set("sql", rawSql) + queryResult.Meta.Set("sql", rawSQL) - rows, err := db.Query(rawSql) + rows, err := db.Query(rawSQL) if err != nil { queryResult.Error = err continue @@ -122,13 +139,13 @@ func (e *DefaultSqlEngine) Query( switch format { case "time_series": - err := transformToTimeSeries(query, rows, queryResult, tsdbQuery) + err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery) if err != nil { queryResult.Error = err continue } case "table": - err := transformToTable(query, rows, queryResult, tsdbQuery) + err := e.transformToTable(query, rows, queryResult, tsdbQuery) if err != nil { queryResult.Error = err continue @@ -139,6 +156,223 @@ func (e *DefaultSqlEngine) Query( return result, nil } +func (e *sqlQueryEndpoint) 
transformToTable(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error { + columnNames, err := rows.Columns() + columnCount := len(columnNames) + + if err != nil { + return err + } + + rowLimit := 1000000 + rowCount := 0 + timeIndex := -1 + + table := &Table{ + Columns: make([]TableColumn, columnCount), + Rows: make([]RowValues, 0), + } + + for i, name := range columnNames { + table.Columns[i].Text = name + + for _, tc := range e.timeColumnNames { + if name == tc { + timeIndex = i + break + } + } + } + + columnTypes, err := rows.ColumnTypes() + if err != nil { + return err + } + + for ; rows.Next(); rowCount++ { + if rowCount > rowLimit { + return fmt.Errorf("query row limit exceeded, limit %d", rowLimit) + } + + values, err := e.rowTransformer.Transform(columnTypes, rows) + if err != nil { + return err + } + + // converts column named time to unix timestamp in milliseconds + // to make native mssql datetime types and epoch dates work in + // annotation and table queries. 
+ ConvertSqlTimeColumnToEpochMs(values, timeIndex) + table.Rows = append(table.Rows, values) + } + + result.Tables = append(result.Tables, table) + result.Meta.Set("rowCount", rowCount) + return nil +} + +func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error { + pointsBySeries := make(map[string]*TimeSeries) + seriesByQueryOrder := list.New() + + columnNames, err := rows.Columns() + if err != nil { + return err + } + + columnTypes, err := rows.ColumnTypes() + if err != nil { + return err + } + + rowLimit := 1000000 + rowCount := 0 + timeIndex := -1 + metricIndex := -1 + + // check columns of resultset: a column named time is mandatory + // the first text column is treated as metric name unless a column named metric is present + for i, col := range columnNames { + for _, tc := range e.timeColumnNames { + if col == tc { + timeIndex = i + continue + } + } + switch col { + case "metric": + metricIndex = i + default: + if metricIndex == -1 { + columnType := columnTypes[i].DatabaseTypeName() + + for _, mct := range e.metricColumnTypes { + if columnType == mct { + metricIndex = i + continue + } + } + } + } + } + + if timeIndex == -1 { + return fmt.Errorf("Found no column named %s", strings.Join(e.timeColumnNames, " or ")) + } + + fillMissing := query.Model.Get("fill").MustBool(false) + var fillInterval float64 + fillValue := null.Float{} + if fillMissing { + fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 + if !query.Model.Get("fillNull").MustBool(false) { + fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() + fillValue.Valid = true + } + } + + for rows.Next() { + var timestamp float64 + var value null.Float + var metric string + + if rowCount > rowLimit { + return fmt.Errorf("query row limit exceeded, limit %d", rowLimit) + } + + values, err := e.rowTransformer.Transform(columnTypes, rows) + if err != nil { + return err + } + + // converts column named time to unix 
timestamp in milliseconds to make + // native mysql datetime types and epoch dates work in + // annotation and table queries. + ConvertSqlTimeColumnToEpochMs(values, timeIndex) + + switch columnValue := values[timeIndex].(type) { + case int64: + timestamp = float64(columnValue) + case float64: + timestamp = columnValue + default: + return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) + } + + if metricIndex >= 0 { + if columnValue, ok := values[metricIndex].(string); ok { + metric = columnValue + } else { + return fmt.Errorf("Column metric must be of type %s. metric column name: %s type: %s but datatype is %T", strings.Join(e.metricColumnTypes, ", "), columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex]) + } + } + + for i, col := range columnNames { + if i == timeIndex || i == metricIndex { + continue + } + + if value, err = ConvertSqlValueColumnToFloat(col, values[i]); err != nil { + return err + } + + if metricIndex == -1 { + metric = col + } + + series, exist := pointsBySeries[metric] + if !exist { + series = &TimeSeries{Name: metric} + pointsBySeries[metric] = series + seriesByQueryOrder.PushBack(metric) + } + + if fillMissing { + var intervalStart float64 + if !exist { + intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) + } else { + intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval + } + + // align interval start + intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval + + for i := intervalStart; i < timestamp; i += fillInterval { + series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)}) + rowCount++ + } + } + + series.Points = append(series.Points, TimePoint{value, null.FloatFrom(timestamp)}) + + e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) + } + } + + for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() { + 
key := elem.Value.(string) + result.Series = append(result.Series, pointsBySeries[key]) + + if fillMissing { + series := pointsBySeries[key] + // fill in values from last fetched value till interval end + intervalStart := series.Points[len(series.Points)-1][1].Float64 + intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) + + // align interval start + intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval + for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval { + series.Points = append(series.Points, TimePoint{fillValue, null.FloatFrom(i)}) + rowCount++ + } + } + } + + result.Meta.Set("rowCount", rowCount) + return nil +} + // ConvertSqlTimeColumnToEpochMs converts column named time to unix timestamp in milliseconds // to make native datetime types and epoch dates work in annotation and table queries. func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) { From 2f3851b915620040204919b17b603c5b07a7de1a Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 18:10:17 +0200 Subject: [PATCH 211/263] postgres: use new sql engine --- pkg/tsdb/postgres/macros.go | 38 ++-- pkg/tsdb/postgres/macros_test.go | 2 +- pkg/tsdb/postgres/postgres.go | 269 +++-------------------------- pkg/tsdb/postgres/postgres_test.go | 30 ++-- 4 files changed, 64 insertions(+), 275 deletions(-) diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go index 61e88418ff4b..661dbf3d4cef 100644 --- a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -14,18 +14,18 @@ import ( const rsIdentifier = `([_a-zA-Z0-9]+)` const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` -type PostgresMacroEngine struct { - TimeRange *tsdb.TimeRange - Query *tsdb.Query +type postgresMacroEngine struct { + timeRange *tsdb.TimeRange + query *tsdb.Query } -func NewPostgresMacroEngine() tsdb.SqlMacroEngine { - return &PostgresMacroEngine{} +func newPostgresMacroEngine() tsdb.SqlMacroEngine { + return 
&postgresMacroEngine{} } -func (m *PostgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { - m.TimeRange = timeRange - m.Query = query +func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + m.timeRange = timeRange + m.query = query rExp, _ := regexp.Compile(sExpr) var macroError error @@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str return result + str[lastIndex:] } -func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, error) { +func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, error) { switch name { case "__time": if len(args) == 0 { @@ -83,11 +83,11 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeFrom": - return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) @@ -97,16 +97,16 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, return "", fmt.Errorf("error parsing interval %v", args[1]) } if len(args) == 3 { - 
m.Query.Model.Set("fill", true) - m.Query.Model.Set("fillInterval", interval.Seconds()) + m.query.Model.Set("fill", true) + m.query.Model.Set("fillInterval", interval.Seconds()) if args[2] == "NULL" { - m.Query.Model.Set("fillNull", true) + m.query.Model.Set("fillNull", true) } else { floatVal, err := strconv.ParseFloat(args[2], 64) if err != nil { return "", fmt.Errorf("error parsing fill value %v", args[2]) } - m.Query.Model.Set("fillValue", floatVal) + m.query.Model.Set("fillValue", floatVal) } } return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil @@ -114,11 +114,11 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochFrom": - return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil case "__unixEpochTo": - return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil default: return "", fmt.Errorf("Unknown macro %v", name) } diff --git a/pkg/tsdb/postgres/macros_test.go b/pkg/tsdb/postgres/macros_test.go index 8c5818504306..194573be0fd2 100644 --- a/pkg/tsdb/postgres/macros_test.go +++ b/pkg/tsdb/postgres/macros_test.go @@ -12,7 +12,7 @@ import ( func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { - engine := NewPostgresMacroEngine() + engine := newPostgresMacroEngine() query := &tsdb.Query{} Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { diff --git 
a/pkg/tsdb/postgres/postgres.go b/pkg/tsdb/postgres/postgres.go index f19e4fb54f4e..b9f333db127b 100644 --- a/pkg/tsdb/postgres/postgres.go +++ b/pkg/tsdb/postgres/postgres.go @@ -1,46 +1,38 @@ package postgres import ( - "container/list" - "context" - "fmt" - "math" + "database/sql" "net/url" "strconv" "github.com/go-xorm/core" - "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" ) -type PostgresQueryEndpoint struct { - sqlEngine tsdb.SqlEngine - log log.Logger -} - func init() { - tsdb.RegisterTsdbQueryEndpoint("postgres", NewPostgresQueryEndpoint) + tsdb.RegisterTsdbQueryEndpoint("postgres", newPostgresQueryEndpoint) } -func NewPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - endpoint := &PostgresQueryEndpoint{ - log: log.New("tsdb.postgres"), - } - - endpoint.sqlEngine = &tsdb.DefaultSqlEngine{ - MacroEngine: NewPostgresMacroEngine(), - } +func newPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + logger := log.New("tsdb.postgres") cnnstr := generateConnectionString(datasource) - endpoint.log.Debug("getEngine", "connection", cnnstr) + logger.Debug("getEngine", "connection", cnnstr) - if err := endpoint.sqlEngine.InitEngine("postgres", datasource, cnnstr); err != nil { - return nil, err + config := tsdb.SqlQueryEndpointConfiguration{ + DriverName: "postgres", + ConnectionString: cnnstr, + Datasource: datasource, + MetricColumnTypes: []string{"UNKNOWN", "TEXT", "VARCHAR", "CHAR"}, } - return endpoint, nil + rowTransformer := postgresRowTransformer{ + log: logger, + } + + return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newPostgresMacroEngine(), logger) } func generateConnectionString(datasource *models.DataSource) string { @@ -63,70 +55,15 @@ func generateConnectionString(datasource *models.DataSource) string { return u.String() } -func (e *PostgresQueryEndpoint) 
Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable) +type postgresRowTransformer struct { + log log.Logger } -func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { - columnNames, err := rows.Columns() - if err != nil { - return err - } - - table := &tsdb.Table{ - Columns: make([]tsdb.TableColumn, len(columnNames)), - Rows: make([]tsdb.RowValues, 0), - } - - for i, name := range columnNames { - table.Columns[i].Text = name - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - - // check if there is a column named time - for i, col := range columnNames { - switch col { - case "time": - timeIndex = i - } - } - - for ; rows.Next(); rowCount++ { - if rowCount > rowLimit { - return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds to make - // native postgres datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) +func (t *postgresRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { + values := make([]interface{}, len(columnTypes)) + valuePtrs := make([]interface{}, len(columnTypes)) - table.Rows = append(table.Rows, values) - } - - result.Tables = append(result.Tables, table) - result.Meta.Set("rowCount", rowCount) - return nil -} - -func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) { - types, err := rows.ColumnTypes() - if err != nil { - return nil, err - } - - values := make([]interface{}, len(types)) - valuePtrs := make([]interface{}, len(types)) - - for i := 0; i < len(types); i++ { + for i := 0; i < len(columnTypes); i++ { valuePtrs[i] = &values[i] } @@ -136,20 +73,20 @@ func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, // convert types not handled by lib/pq // unhandled types are returned as []byte - for i := 0; i < len(types); i++ { + for i := 0; i < len(columnTypes); i++ { if value, ok := values[i].([]byte); ok { - switch types[i].DatabaseTypeName() { + switch columnTypes[i].DatabaseTypeName() { case "NUMERIC": if v, err := strconv.ParseFloat(string(value), 64); err == nil { values[i] = v } else { - e.log.Debug("Rows", "Error converting numeric to float", value) + t.log.Debug("Rows", "Error converting numeric to float", value) } case "UNKNOWN", "CIDR", "INET", "MACADDR": // char literals have type UNKNOWN values[i] = string(value) default: - e.log.Debug("Rows", "Unknown database type", types[i].DatabaseTypeName(), "value", value) + t.log.Debug("Rows", "Unknown database type", columnTypes[i].DatabaseTypeName(), "value", value) values[i] = string(value) } } @@ -157,159 +94,3 @@ func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, return values, nil } - -func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, 
tsdbQuery *tsdb.TsdbQuery) error { - pointsBySeries := make(map[string]*tsdb.TimeSeries) - seriesByQueryOrder := list.New() - - columnNames, err := rows.Columns() - if err != nil { - return err - } - - columnTypes, err := rows.ColumnTypes() - if err != nil { - return err - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - metricIndex := -1 - - // check columns of resultset: a column named time is mandatory - // the first text column is treated as metric name unless a column named metric is present - for i, col := range columnNames { - switch col { - case "time": - timeIndex = i - case "metric": - metricIndex = i - default: - if metricIndex == -1 { - switch columnTypes[i].DatabaseTypeName() { - case "UNKNOWN", "TEXT", "VARCHAR", "CHAR": - metricIndex = i - } - } - } - } - - if timeIndex == -1 { - return fmt.Errorf("Found no column named time") - } - - fillMissing := query.Model.Get("fill").MustBool(false) - var fillInterval float64 - fillValue := null.Float{} - if fillMissing { - fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 - if !query.Model.Get("fillNull").MustBool(false) { - fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() - fillValue.Valid = true - } - } - - for rows.Next() { - var timestamp float64 - var value null.Float - var metric string - - if rowCount > rowLimit { - return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds to make - // native mysql datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) - - switch columnValue := values[timeIndex].(type) { - case int64: - timestamp = float64(columnValue) - case float64: - timestamp = columnValue - default: - return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) - } - - if metricIndex >= 0 { - if columnValue, ok := values[metricIndex].(string); ok { - metric = columnValue - } else { - return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex]) - } - } - - for i, col := range columnNames { - if i == timeIndex || i == metricIndex { - continue - } - - if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil { - return err - } - - if metricIndex == -1 { - metric = col - } - - series, exist := pointsBySeries[metric] - if !exist { - series = &tsdb.TimeSeries{Name: metric} - pointsBySeries[metric] = series - seriesByQueryOrder.PushBack(metric) - } - - if fillMissing { - var intervalStart float64 - if !exist { - intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) - } else { - intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval - } - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - - for i := intervalStart; i < timestamp; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - - series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)}) - - e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) - rowCount++ - - } - } - - for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() { - key := elem.Value.(string) - result.Series = append(result.Series, pointsBySeries[key]) - - if fillMissing { - series := pointsBySeries[key] - // fill in values from last fetched value till interval 
end - intervalStart := series.Points[len(series.Points)-1][1].Float64 - intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - } - - result.Meta.Set("rowCount", rowCount) - return nil -} diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go index a3a6d6546df5..089829bf5901 100644 --- a/pkg/tsdb/postgres/postgres_test.go +++ b/pkg/tsdb/postgres/postgres_test.go @@ -8,8 +8,9 @@ import ( "time" "github.com/go-xorm/xorm" + "github.com/grafana/grafana/pkg/components/securejsondata" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" "github.com/grafana/grafana/pkg/tsdb" @@ -22,8 +23,9 @@ import ( // The tests require a PostgreSQL db named grafanadstest and a user/password grafanatest/grafanatest! // Use the docker/blocks/postgres_tests/docker-compose.yaml to spin up a // preconfigured Postgres server suitable for running these tests. -// There is also a dashboard.json in same directory that you can import to Grafana -// once you've created a datasource for the test server/database. +// There is also a datasource and dashboard provisioned by devenv scripts that you can +// use to verify that the generated data are vizualized as expected, see +// devenv/README.md for setup instructions. 
func TestPostgres(t *testing.T) { // change to true to run the MySQL tests runPostgresTests := false @@ -36,19 +38,25 @@ func TestPostgres(t *testing.T) { Convey("PostgreSQL", t, func() { x := InitPostgresTestDB(t) - endpoint := &PostgresQueryEndpoint{ - sqlEngine: &tsdb.DefaultSqlEngine{ - MacroEngine: NewPostgresMacroEngine(), - XormEngine: x, - }, - log: log.New("tsdb.postgres"), + origXormEngine := tsdb.NewXormEngine + tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) { + return x, nil } - sess := x.NewSession() - defer sess.Close() + endpoint, err := newPostgresQueryEndpoint(&models.DataSource{ + JsonData: simplejson.New(), + SecureJsonData: securejsondata.SecureJsonData{}, + }) + So(err, ShouldBeNil) + sess := x.NewSession() fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + Reset(func() { + sess.Close() + tsdb.NewXormEngine = origXormEngine + }) + Convey("Given a table with different native data types", func() { sql := ` DROP TABLE IF EXISTS postgres_types; From 27db4540125ae1c5d342319fade4043bc2221081 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 18:10:45 +0200 Subject: [PATCH 212/263] mysql: use new sql engine --- pkg/tsdb/mysql/macros.go | 38 ++--- pkg/tsdb/mysql/macros_test.go | 2 +- pkg/tsdb/mysql/mysql.go | 265 +++------------------------------- pkg/tsdb/mysql/mysql_test.go | 30 ++-- 4 files changed, 61 insertions(+), 274 deletions(-) diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index 584f731f3b80..078d1ff54f89 100644 --- a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -14,18 +14,18 @@ import ( const rsIdentifier = `([_a-zA-Z0-9]+)` const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` -type MySqlMacroEngine struct { - TimeRange *tsdb.TimeRange - Query *tsdb.Query +type mySqlMacroEngine struct { + timeRange *tsdb.TimeRange + query *tsdb.Query } -func NewMysqlMacroEngine() tsdb.SqlMacroEngine { - return &MySqlMacroEngine{} +func newMysqlMacroEngine() 
tsdb.SqlMacroEngine { + return &mySqlMacroEngine{} } -func (m *MySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { - m.TimeRange = timeRange - m.Query = query +func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + m.timeRange = timeRange + m.query = query rExp, _ := regexp.Compile(sExpr) var macroError error @@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str return result + str[lastIndex:] } -func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { +func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { switch name { case "__timeEpoch", "__time": if len(args) == 0 { @@ -78,11 +78,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeFrom": - return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -92,16 +92,16 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("error parsing interval %v", args[1]) } if len(args) == 3 { - 
m.Query.Model.Set("fill", true) - m.Query.Model.Set("fillInterval", interval.Seconds()) + m.query.Model.Set("fill", true) + m.query.Model.Set("fillInterval", interval.Seconds()) if args[2] == "NULL" { - m.Query.Model.Set("fillNull", true) + m.query.Model.Set("fillNull", true) } else { floatVal, err := strconv.ParseFloat(args[2], 64) if err != nil { return "", fmt.Errorf("error parsing fill value %v", args[2]) } - m.Query.Model.Set("fillValue", floatVal) + m.query.Model.Set("fillValue", floatVal) } } return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil @@ -109,11 +109,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochFrom": - return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil case "__unixEpochTo": - return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil default: return "", fmt.Errorf("Unknown macro %v", name) } diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go index 2561661b3859..003af9a737fa 100644 --- a/pkg/tsdb/mysql/macros_test.go +++ b/pkg/tsdb/mysql/macros_test.go @@ -12,7 +12,7 @@ import ( func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { - engine := &MySqlMacroEngine{} + engine := &mySqlMacroEngine{} query := &tsdb.Query{} Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { diff --git a/pkg/tsdb/mysql/mysql.go b/pkg/tsdb/mysql/mysql.go index 
7eceaffdb09d..645f6b49bbb1 100644 --- a/pkg/tsdb/mysql/mysql.go +++ b/pkg/tsdb/mysql/mysql.go @@ -1,39 +1,24 @@ package mysql import ( - "container/list" - "context" "database/sql" "fmt" - "math" "reflect" "strconv" "github.com/go-sql-driver/mysql" "github.com/go-xorm/core" - "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" ) -type MysqlQueryEndpoint struct { - sqlEngine tsdb.SqlEngine - log log.Logger -} - func init() { - tsdb.RegisterTsdbQueryEndpoint("mysql", NewMysqlQueryEndpoint) + tsdb.RegisterTsdbQueryEndpoint("mysql", newMysqlQueryEndpoint) } -func NewMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - endpoint := &MysqlQueryEndpoint{ - log: log.New("tsdb.mysql"), - } - - endpoint.sqlEngine = &tsdb.DefaultSqlEngine{ - MacroEngine: NewMysqlMacroEngine(), - } +func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + logger := log.New("tsdb.mysql") cnnstr := fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC&allowNativePasswords=true", datasource.User, @@ -42,85 +27,35 @@ func NewMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoin datasource.Url, datasource.Database, ) - endpoint.log.Debug("getEngine", "connection", cnnstr) + logger.Debug("getEngine", "connection", cnnstr) - if err := endpoint.sqlEngine.InitEngine("mysql", datasource, cnnstr); err != nil { - return nil, err + config := tsdb.SqlQueryEndpointConfiguration{ + DriverName: "mysql", + ConnectionString: cnnstr, + Datasource: datasource, + TimeColumnNames: []string{"time", "time_sec"}, + MetricColumnTypes: []string{"CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT"}, } - return endpoint, nil -} - -// Query is the main function for the MysqlExecutor -func (e *MysqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery 
*tsdb.TsdbQuery) (*tsdb.Response, error) { - return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable) -} - -func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { - columnNames, err := rows.Columns() - columnCount := len(columnNames) - - if err != nil { - return err + rowTransformer := mysqlRowTransformer{ + log: logger, } - table := &tsdb.Table{ - Columns: make([]tsdb.TableColumn, columnCount), - Rows: make([]tsdb.RowValues, 0), - } - - for i, name := range columnNames { - table.Columns[i].Text = name - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - - // check if there is a column named time - for i, col := range columnNames { - switch col { - case "time", "time_sec": - timeIndex = i - } - } - - for ; rows.Next(); rowCount++ { - if rowCount > rowLimit { - return fmt.Errorf("MySQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds to make - // native mysql datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) - - table.Rows = append(table.Rows, values) - } - - result.Tables = append(result.Tables, table) - result.Meta.Set("rowCount", rowCount) - return nil + return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newMysqlMacroEngine(), logger) } -func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) { - types, err := rows.ColumnTypes() - if err != nil { - return nil, err - } +type mysqlRowTransformer struct { + log log.Logger +} - values := make([]interface{}, len(types)) +func (t *mysqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { + values := make([]interface{}, len(columnTypes)) for i := range values { - scanType := types[i].ScanType() + scanType := columnTypes[i].ScanType() values[i] = reflect.New(scanType).Interface() - if types[i].DatabaseTypeName() == "BIT" { + if columnTypes[i].DatabaseTypeName() == "BIT" { values[i] = new([]byte) } } @@ -129,7 +64,7 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er return nil, err } - for i := 0; i < len(types); i++ { + for i := 0; i < len(columnTypes); i++ { typeName := reflect.ValueOf(values[i]).Type().String() switch typeName { @@ -158,7 +93,7 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er } } - if types[i].DatabaseTypeName() == "DECIMAL" { + if columnTypes[i].DatabaseTypeName() == "DECIMAL" { f, err := strconv.ParseFloat(values[i].(string), 64) if err == nil { @@ -171,159 +106,3 @@ func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, er return values, nil } - -func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { - pointsBySeries := make(map[string]*tsdb.TimeSeries) - seriesByQueryOrder := list.New() - - columnNames, err := rows.Columns() - if err != nil { - return err - } - - columnTypes, err := 
rows.ColumnTypes() - if err != nil { - return err - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - metricIndex := -1 - - // check columns of resultset: a column named time is mandatory - // the first text column is treated as metric name unless a column named metric is present - for i, col := range columnNames { - switch col { - case "time", "time_sec": - timeIndex = i - case "metric": - metricIndex = i - default: - if metricIndex == -1 { - switch columnTypes[i].DatabaseTypeName() { - case "CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT": - metricIndex = i - } - } - } - } - - if timeIndex == -1 { - return fmt.Errorf("Found no column named time or time_sec") - } - - fillMissing := query.Model.Get("fill").MustBool(false) - var fillInterval float64 - fillValue := null.Float{} - if fillMissing { - fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 - if !query.Model.Get("fillNull").MustBool(false) { - fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() - fillValue.Valid = true - } - } - - for rows.Next() { - var timestamp float64 - var value null.Float - var metric string - - if rowCount > rowLimit { - return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds to make - // native mysql datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) - - switch columnValue := values[timeIndex].(type) { - case int64: - timestamp = float64(columnValue) - case float64: - timestamp = columnValue - default: - return fmt.Errorf("Invalid type for column time/time_sec, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) - } - - if metricIndex >= 0 { - if columnValue, ok := values[metricIndex].(string); ok { - metric = columnValue - } else { - return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex]) - } - } - - for i, col := range columnNames { - if i == timeIndex || i == metricIndex { - continue - } - - if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil { - return err - } - - if metricIndex == -1 { - metric = col - } - - series, exist := pointsBySeries[metric] - if !exist { - series = &tsdb.TimeSeries{Name: metric} - pointsBySeries[metric] = series - seriesByQueryOrder.PushBack(metric) - } - - if fillMissing { - var intervalStart float64 - if !exist { - intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) - } else { - intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval - } - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - - for i := intervalStart; i < timestamp; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - - series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)}) - - e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) - rowCount++ - - } - } - - for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() { - key := elem.Value.(string) - result.Series = append(result.Series, pointsBySeries[key]) - - if fillMissing { - series := pointsBySeries[key] - // fill in values from last fetched value till 
interval end - intervalStart := series.Points[len(series.Points)-1][1].Float64 - intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - } - - result.Meta.Set("rowCount", rowCount) - return nil -} diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go index 850a37617e28..3b4e283b726b 100644 --- a/pkg/tsdb/mysql/mysql_test.go +++ b/pkg/tsdb/mysql/mysql_test.go @@ -8,8 +8,9 @@ import ( "time" "github.com/go-xorm/xorm" + "github.com/grafana/grafana/pkg/components/securejsondata" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" "github.com/grafana/grafana/pkg/tsdb" @@ -21,8 +22,9 @@ import ( // The tests require a MySQL db named grafana_ds_tests and a user/password grafana/password // Use the docker/blocks/mysql_tests/docker-compose.yaml to spin up a // preconfigured MySQL server suitable for running these tests. -// There is also a dashboard.json in same directory that you can import to Grafana -// once you've created a datasource for the test server/database. +// There is also a datasource and dashboard provisioned by devenv scripts that you can +// use to verify that the generated data are vizualized as expected, see +// devenv/README.md for setup instructions. 
func TestMySQL(t *testing.T) { // change to true to run the MySQL tests runMySqlTests := false @@ -35,19 +37,25 @@ func TestMySQL(t *testing.T) { Convey("MySQL", t, func() { x := InitMySQLTestDB(t) - endpoint := &MysqlQueryEndpoint{ - sqlEngine: &tsdb.DefaultSqlEngine{ - MacroEngine: NewMysqlMacroEngine(), - XormEngine: x, - }, - log: log.New("tsdb.mysql"), + origXormEngine := tsdb.NewXormEngine + tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) { + return x, nil } - sess := x.NewSession() - defer sess.Close() + endpoint, err := newMysqlQueryEndpoint(&models.DataSource{ + JsonData: simplejson.New(), + SecureJsonData: securejsondata.SecureJsonData{}, + }) + So(err, ShouldBeNil) + sess := x.NewSession() fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC) + Reset(func() { + sess.Close() + tsdb.NewXormEngine = origXormEngine + }) + Convey("Given a table with different native data types", func() { if exists, err := sess.IsTableExist("mysql_types"); err != nil || exists { So(err, ShouldBeNil) From 4f7882cda2b3443e473caf426a321841b223a8ab Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 18:11:10 +0200 Subject: [PATCH 213/263] mssql: use new sql engine --- pkg/tsdb/mssql/macros.go | 38 ++--- pkg/tsdb/mssql/macros_test.go | 2 +- pkg/tsdb/mssql/mssql.go | 268 ++++------------------------------ pkg/tsdb/mssql/mssql_test.go | 30 ++-- 4 files changed, 64 insertions(+), 274 deletions(-) diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index ad3d1edd5d71..2c16b5cb27f1 100644 --- a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -14,18 +14,18 @@ import ( const rsIdentifier = `([_a-zA-Z0-9]+)` const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` -type MsSqlMacroEngine struct { - TimeRange *tsdb.TimeRange - Query *tsdb.Query +type msSqlMacroEngine struct { + timeRange *tsdb.TimeRange + query *tsdb.Query } -func NewMssqlMacroEngine() tsdb.SqlMacroEngine { - return &MsSqlMacroEngine{} +func newMssqlMacroEngine() 
tsdb.SqlMacroEngine { + return &msSqlMacroEngine{} } -func (m *MsSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { - m.TimeRange = timeRange - m.Query = query +func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + m.timeRange = timeRange + m.query = query rExp, _ := regexp.Compile(sExpr) var macroError error @@ -66,7 +66,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str return result + str[lastIndex:] } -func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { +func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { switch name { case "__time": if len(args) == 0 { @@ -83,11 +83,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeFrom": - return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("'%s'", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -97,16 +97,16 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er return "", fmt.Errorf("error parsing interval %v", args[1]) } if len(args) == 3 { - m.Query.Model.Set("fill", 
true) - m.Query.Model.Set("fillInterval", interval.Seconds()) + m.query.Model.Set("fill", true) + m.query.Model.Set("fillInterval", interval.Seconds()) if args[2] == "NULL" { - m.Query.Model.Set("fillNull", true) + m.query.Model.Set("fillNull", true) } else { floatVal, err := strconv.ParseFloat(args[2], 64) if err != nil { return "", fmt.Errorf("error parsing fill value %v", args[2]) } - m.Query.Model.Set("fillValue", floatVal) + m.query.Model.Set("fillValue", floatVal) } } return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil @@ -114,11 +114,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.timeRange.GetFromAsSecondsEpoch(), args[0], m.timeRange.GetToAsSecondsEpoch()), nil case "__unixEpochFrom": - return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetFromAsSecondsEpoch()), nil case "__unixEpochTo": - return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("%d", m.timeRange.GetToAsSecondsEpoch()), nil default: return "", fmt.Errorf("Unknown macro %v", name) } diff --git a/pkg/tsdb/mssql/macros_test.go b/pkg/tsdb/mssql/macros_test.go index 49368fe36311..1895cd994424 100644 --- a/pkg/tsdb/mssql/macros_test.go +++ b/pkg/tsdb/mssql/macros_test.go @@ -14,7 +14,7 @@ import ( func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { - engine := &MsSqlMacroEngine{} + engine := &msSqlMacroEngine{} query := &tsdb.Query{ Model: simplejson.New(), } diff --git a/pkg/tsdb/mssql/mssql.go b/pkg/tsdb/mssql/mssql.go index eb71259b46be..72e57d03fa02 100644 --- a/pkg/tsdb/mssql/mssql.go +++ 
b/pkg/tsdb/mssql/mssql.go @@ -1,49 +1,40 @@ package mssql import ( - "container/list" - "context" "database/sql" "fmt" "strconv" "strings" - "math" - _ "github.com/denisenkom/go-mssqldb" "github.com/go-xorm/core" - "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" ) -type MssqlQueryEndpoint struct { - sqlEngine tsdb.SqlEngine - log log.Logger -} - func init() { - tsdb.RegisterTsdbQueryEndpoint("mssql", NewMssqlQueryEndpoint) + tsdb.RegisterTsdbQueryEndpoint("mssql", newMssqlQueryEndpoint) } -func NewMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - endpoint := &MssqlQueryEndpoint{ - log: log.New("tsdb.mssql"), - } - - endpoint.sqlEngine = &tsdb.DefaultSqlEngine{ - MacroEngine: NewMssqlMacroEngine(), - } +func newMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + logger := log.New("tsdb.mssql") cnnstr := generateConnectionString(datasource) - endpoint.log.Debug("getEngine", "connection", cnnstr) + logger.Debug("getEngine", "connection", cnnstr) - if err := endpoint.sqlEngine.InitEngine("mssql", datasource, cnnstr); err != nil { - return nil, err + config := tsdb.SqlQueryEndpointConfiguration{ + DriverName: "mssql", + ConnectionString: cnnstr, + Datasource: datasource, + MetricColumnTypes: []string{"VARCHAR", "CHAR", "NVARCHAR", "NCHAR"}, + } + + rowTransformer := mssqlRowTransformer{ + log: logger, } - return endpoint, nil + return tsdb.NewSqlQueryEndpoint(&config, &rowTransformer, newMssqlMacroEngine(), logger) } func generateConnectionString(datasource *models.DataSource) string { @@ -70,71 +61,16 @@ func generateConnectionString(datasource *models.DataSource) string { ) } -// Query is the main function for the MssqlQueryEndpoint -func (e *MssqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - return 
e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable) -} - -func (e MssqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { - columnNames, err := rows.Columns() - columnCount := len(columnNames) - - if err != nil { - return err - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - - table := &tsdb.Table{ - Columns: make([]tsdb.TableColumn, columnCount), - Rows: make([]tsdb.RowValues, 0), - } - - for i, name := range columnNames { - table.Columns[i].Text = name - - // check if there is a column named time - switch name { - case "time": - timeIndex = i - } - } - - columnTypes, err := rows.ColumnTypes() - if err != nil { - return err - } - - for ; rows.Next(); rowCount++ { - if rowCount > rowLimit { - return fmt.Errorf("MsSQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(columnTypes, rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds - // to make native mssql datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) - table.Rows = append(table.Rows, values) - } - - result.Tables = append(result.Tables, table) - result.Meta.Set("rowCount", rowCount) - return nil +type mssqlRowTransformer struct { + log log.Logger } -func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { - values := make([]interface{}, len(types)) - valuePtrs := make([]interface{}, len(types)) +func (t *mssqlRowTransformer) Transform(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { + values := make([]interface{}, len(columnTypes)) + valuePtrs := make([]interface{}, len(columnTypes)) - for i, stype := range types { - e.log.Debug("type", "type", stype) + for i, stype := range columnTypes { + t.log.Debug("type", "type", stype) valuePtrs[i] = &values[i] } @@ -144,17 +80,17 @@ func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core. // convert types not handled by denisenkom/go-mssqldb // unhandled types are returned as []byte - for i := 0; i < len(types); i++ { + for i := 0; i < len(columnTypes); i++ { if value, ok := values[i].([]byte); ok { - switch types[i].DatabaseTypeName() { + switch columnTypes[i].DatabaseTypeName() { case "MONEY", "SMALLMONEY", "DECIMAL": if v, err := strconv.ParseFloat(string(value), 64); err == nil { values[i] = v } else { - e.log.Debug("Rows", "Error converting numeric to float", value) + t.log.Debug("Rows", "Error converting numeric to float", value) } default: - e.log.Debug("Rows", "Unknown database type", types[i].DatabaseTypeName(), "value", value) + t.log.Debug("Rows", "Unknown database type", columnTypes[i].DatabaseTypeName(), "value", value) values[i] = string(value) } } @@ -162,157 +98,3 @@ func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core. 
return values, nil } - -func (e MssqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { - pointsBySeries := make(map[string]*tsdb.TimeSeries) - seriesByQueryOrder := list.New() - - columnNames, err := rows.Columns() - if err != nil { - return err - } - - columnTypes, err := rows.ColumnTypes() - if err != nil { - return err - } - - rowLimit := 1000000 - rowCount := 0 - timeIndex := -1 - metricIndex := -1 - - // check columns of resultset: a column named time is mandatory - // the first text column is treated as metric name unless a column named metric is present - for i, col := range columnNames { - switch col { - case "time": - timeIndex = i - case "metric": - metricIndex = i - default: - if metricIndex == -1 { - switch columnTypes[i].DatabaseTypeName() { - case "VARCHAR", "CHAR", "NVARCHAR", "NCHAR": - metricIndex = i - } - } - } - } - - if timeIndex == -1 { - return fmt.Errorf("Found no column named time") - } - - fillMissing := query.Model.Get("fill").MustBool(false) - var fillInterval float64 - fillValue := null.Float{} - if fillMissing { - fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 - if !query.Model.Get("fillNull").MustBool(false) { - fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() - fillValue.Valid = true - } - } - - for rows.Next() { - var timestamp float64 - var value null.Float - var metric string - - if rowCount > rowLimit { - return fmt.Errorf("MSSQL query row limit exceeded, limit %d", rowLimit) - } - - values, err := e.getTypedRowData(columnTypes, rows) - if err != nil { - return err - } - - // converts column named time to unix timestamp in milliseconds to make - // native mysql datetime types and epoch dates work in - // annotation and table queries. 
- tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) - - switch columnValue := values[timeIndex].(type) { - case int64: - timestamp = float64(columnValue) - case float64: - timestamp = columnValue - default: - return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) - } - - if metricIndex >= 0 { - if columnValue, ok := values[metricIndex].(string); ok { - metric = columnValue - } else { - return fmt.Errorf("Column metric must be of type CHAR, VARCHAR, NCHAR or NVARCHAR. metric column name: %s type: %s but datatype is %T", columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex]) - } - } - - for i, col := range columnNames { - if i == timeIndex || i == metricIndex { - continue - } - - if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil { - return err - } - - if metricIndex == -1 { - metric = col - } - - series, exist := pointsBySeries[metric] - if !exist { - series = &tsdb.TimeSeries{Name: metric} - pointsBySeries[metric] = series - seriesByQueryOrder.PushBack(metric) - } - - if fillMissing { - var intervalStart float64 - if !exist { - intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) - } else { - intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval - } - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - - for i := intervalStart; i < timestamp; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - - series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)}) - - e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) - } - } - - for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() { - key := elem.Value.(string) - result.Series = append(result.Series, pointsBySeries[key]) - - if fillMissing { - 
series := pointsBySeries[key] - // fill in values from last fetched value till interval end - intervalStart := series.Points[len(series.Points)-1][1].Float64 - intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) - - // align interval start - intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval - for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) - rowCount++ - } - } - } - - result.Meta.Set("rowCount", rowCount) - return nil -} diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go index db04d6d1f023..86484cb9d5e4 100644 --- a/pkg/tsdb/mssql/mssql_test.go +++ b/pkg/tsdb/mssql/mssql_test.go @@ -8,8 +8,9 @@ import ( "time" "github.com/go-xorm/xorm" + "github.com/grafana/grafana/pkg/components/securejsondata" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" "github.com/grafana/grafana/pkg/tsdb" . "github.com/smartystreets/goconvey/convey" @@ -19,8 +20,9 @@ import ( // The tests require a MSSQL db named grafanatest and a user/password grafana/Password! // Use the docker/blocks/mssql_tests/docker-compose.yaml to spin up a // preconfigured MSSQL server suitable for running these tests. -// There is also a dashboard.json in same directory that you can import to Grafana -// once you've created a datasource for the test server/database. +// There is also a datasource and dashboard provisioned by devenv scripts that you can +// use to verify that the generated data are vizualized as expected, see +// devenv/README.md for setup instructions. // If needed, change the variable below to the IP address of the database. 
var serverIP = "localhost" @@ -28,19 +30,25 @@ func TestMSSQL(t *testing.T) { SkipConvey("MSSQL", t, func() { x := InitMSSQLTestDB(t) - endpoint := &MssqlQueryEndpoint{ - sqlEngine: &tsdb.DefaultSqlEngine{ - MacroEngine: NewMssqlMacroEngine(), - XormEngine: x, - }, - log: log.New("tsdb.mssql"), + origXormEngine := tsdb.NewXormEngine + tsdb.NewXormEngine = func(d, c string) (*xorm.Engine, error) { + return x, nil } - sess := x.NewSession() - defer sess.Close() + endpoint, err := newMssqlQueryEndpoint(&models.DataSource{ + JsonData: simplejson.New(), + SecureJsonData: securejsondata.SecureJsonData{}, + }) + So(err, ShouldBeNil) + sess := x.NewSession() fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + Reset(func() { + sess.Close() + tsdb.NewXormEngine = origXormEngine + }) + Convey("Given a table with different native data types", func() { sql := ` IF OBJECT_ID('dbo.[mssql_types]', 'U') IS NOT NULL From 318b8c5a2346d60ede4fe2f01ffb0f665501709c Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 18:12:00 +0200 Subject: [PATCH 214/263] update devenv datasources and dashboards for sql datasources Removed dashboards from docker blocks --- devenv/datasources.yaml | 28 +++- .../datasource_tests_mssql_fakedata.json | 79 ++++------ .../datasource_tests_mssql_unittest.json | 142 ++++++++---------- .../datasource_tests_mysql_fakedata.json | 68 +++------ .../datasource_tests_mysql_unittest.json | 136 ++++++++--------- .../datasource_tests_postgres_fakedata.json | 88 +++++------ .../datasource_tests_postgres_unittest.json | 142 ++++++++---------- 7 files changed, 306 insertions(+), 377 deletions(-) rename docker/blocks/mssql/dashboard.json => devenv/dev-dashboards/datasource_tests_mssql_fakedata.json (92%) rename docker/blocks/mssql_tests/dashboard.json => devenv/dev-dashboards/datasource_tests_mssql_unittest.json (96%) rename docker/blocks/mysql/dashboard.json => devenv/dev-dashboards/datasource_tests_mysql_fakedata.json (92%) 
rename docker/blocks/mysql_tests/dashboard.json => devenv/dev-dashboards/datasource_tests_mysql_unittest.json (96%) rename docker/blocks/postgres/dashboard.json => devenv/dev-dashboards/datasource_tests_postgres_fakedata.json (91%) rename docker/blocks/postgres_tests/dashboard.json => devenv/dev-dashboards/datasource_tests_postgres_unittest.json (95%) diff --git a/devenv/datasources.yaml b/devenv/datasources.yaml index 241381097b12..a4e9bf056410 100644 --- a/devenv/datasources.yaml +++ b/devenv/datasources.yaml @@ -51,12 +51,28 @@ datasources: user: grafana password: password + - name: gdev-mysql-ds-tests + type: mysql + url: localhost:3306 + database: grafana_ds_tests + user: grafana + password: password + - name: gdev-mssql type: mssql url: localhost:1433 database: grafana user: grafana - password: "Password!" + secureJsonData: + password: Password! + + - name: gdev-mssql-ds-tests + type: mssql + url: localhost:1433 + database: grafanatest + user: grafana + secureJsonData: + password: Password! 
- name: gdev-postgres type: postgres @@ -68,6 +84,16 @@ datasources: jsonData: sslmode: "disable" + - name: gdev-postgres-ds-tests + type: postgres + url: localhost:5432 + database: grafanadstest + user: grafanatest + secureJsonData: + password: grafanatest + jsonData: + sslmode: "disable" + - name: gdev-cloudwatch type: cloudwatch editable: true diff --git a/docker/blocks/mssql/dashboard.json b/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json similarity index 92% rename from docker/blocks/mssql/dashboard.json rename to devenv/dev-dashboards/datasource_tests_mssql_fakedata.json index ce9aa141a750..4350b5e44a82 100644 --- a/docker/blocks/mssql/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_MSSQL", - "label": "MSSQL", - "description": "", - "type": "datasource", - "pluginId": "mssql", - "pluginName": "MSSQL" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "5.0.0" - }, - { - "type": "datasource", - "id": "mssql", - "name": "MSSQL", - "version": "1.0.0" - }, - { - "type": "panel", - "id": "table", - "name": "Table", - "version": "5.0.0" - } - ], "annotations": { "list": [ { @@ -52,8 +16,8 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1520976748896, + "id": 203, + "iteration": 1532618661457, "links": [], "panels": [ { @@ -63,7 +27,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "fill": 2, "gridPos": { "h": 9, @@ -149,14 +113,18 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "fill": 2, "gridPos": { "h": 18, @@ -234,14 +202,18 @@ "min": null, "show": 
true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "fill": 2, "gridPos": { "h": 9, @@ -313,11 +285,15 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "columns": [], - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "fontSize": "100%", "gridPos": { "h": 10, @@ -371,13 +347,13 @@ ], "schemaVersion": 16, "style": "dark", - "tags": [], + "tags": ["gdev", "mssql", "fake-data-gen"], "templating": { "list": [ { "allValue": null, "current": {}, - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "hide": 0, "includeAll": false, "label": "Datacenter", @@ -387,6 +363,7 @@ "query": "SELECT DISTINCT datacenter FROM grafana_metric", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -397,7 +374,7 @@ { "allValue": null, "current": {}, - "datasource": "${DS_MSSQL}", + "datasource": "gdev-mssql", "hide": 0, "includeAll": true, "label": "Hostname", @@ -407,6 +384,7 @@ "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -499,6 +477,7 @@ ], "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -533,7 +512,7 @@ ] }, "timezone": "", - "title": "Grafana Fake Data Gen - MSSQL", + "title": "Datasource tests - MSSQL", "uid": "86Js1xRmk", - "version": 11 + "version": 1 } \ No newline at end of file diff --git a/docker/blocks/mssql_tests/dashboard.json b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json similarity index 96% rename from docker/blocks/mssql_tests/dashboard.json rename to devenv/dev-dashboards/datasource_tests_mssql_unittest.json index 80994254093a..5c8eb8243a30 100644 --- 
a/docker/blocks/mssql_tests/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_MSSQL_TEST", - "label": "MSSQL Test", - "description": "", - "type": "datasource", - "pluginId": "mssql", - "pluginName": "Microsoft SQL Server" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "5.0.0" - }, - { - "type": "datasource", - "id": "mssql", - "name": "Microsoft SQL Server", - "version": "1.0.0" - }, - { - "type": "panel", - "id": "table", - "name": "Table", - "version": "5.0.0" - } - ], "annotations": { "list": [ { @@ -47,7 +11,7 @@ "type": "dashboard" }, { - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "enable": false, "hide": false, "iconColor": "#6ed0e0", @@ -59,7 +23,7 @@ "type": "tags" }, { - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "enable": false, "hide": false, "iconColor": "rgba(255, 96, 96, 1)", @@ -71,7 +35,7 @@ "type": "tags" }, { - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "enable": false, "hide": false, "iconColor": "#7eb26d", @@ -83,7 +47,7 @@ "type": "tags" }, { - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "enable": false, "hide": false, "iconColor": "#1f78c1", @@ -96,16 +60,17 @@ } ] }, + "description": "Run the mssql unit tests to generate the data backing this dashboard", "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1523320861623, + "id": 35, + "iteration": 1532618879985, "links": [], "panels": [ { "columns": [], - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fontSize": "100%", "gridPos": { "h": 4, @@ -152,7 +117,7 @@ }, { "columns": [], - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ 
-206,7 +171,7 @@ }, { "columns": [], - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -260,7 +225,7 @@ }, { "columns": [], - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -314,7 +279,7 @@ }, { "columns": [], - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -371,7 +336,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -454,7 +419,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -537,7 +502,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -620,7 +585,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -703,7 +668,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -786,7 +751,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -869,7 +834,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -962,7 +927,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1065,7 +1030,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + 
"datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1158,7 +1123,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1243,7 +1208,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1336,7 +1301,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1421,7 +1386,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1514,7 +1479,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1599,7 +1564,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1686,7 +1651,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1773,7 +1738,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1867,7 +1832,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1954,7 +1919,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2048,7 +2013,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": 
"gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2135,7 +2100,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2229,7 +2194,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2316,7 +2281,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2410,7 +2375,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "gdev-mssql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2496,22 +2461,44 @@ "refresh": false, "schemaVersion": 16, "style": "dark", - "tags": [], + "tags": ["gdev", "mssql"], "templating": { "list": [ { "allValue": "'ALL'", - "current": {}, - "datasource": "${DS_MSSQL_TEST}", + "current": { + "selected": true, + "tags": [], + "text": "All", + "value": "$__all" + }, + "datasource": "gdev-mssql-ds-tests", "hide": 0, "includeAll": true, "label": "Metric", "multi": false, "name": "metric", - "options": [], + "options": [ + { + "selected": true, + "text": "All", + "value": "$__all" + }, + { + "selected": false, + "text": "Metric A", + "value": "Metric A" + }, + { + "selected": false, + "text": "Metric B", + "value": "Metric B" + } + ], "query": "SELECT DISTINCT measurement FROM metric_values", - "refresh": 1, + "refresh": 0, "regex": "", + "skipUrlSync": false, "sort": 0, "tagValuesQuery": "", "tags": [], @@ -2564,6 +2551,7 @@ ], "query": "1s,10s,30s,1m,5m,10m", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -2598,7 +2586,7 @@ ] }, "timezone": "", - "title": "Microsoft SQL Server Data Source Test", + "title": "Datasource tests - MSSQL (unit test)", "uid": "GlAqcPgmz", "version": 58 } \ No newline at end of file diff --git 
a/docker/blocks/mysql/dashboard.json b/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json similarity index 92% rename from docker/blocks/mysql/dashboard.json rename to devenv/dev-dashboards/datasource_tests_mysql_fakedata.json index dba7847cc728..cef8fd4783f8 100644 --- a/docker/blocks/mysql/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_MYSQL", - "label": "MySQL", - "description": "", - "type": "datasource", - "pluginId": "mysql", - "pluginName": "MySQL" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "5.0.0" - }, - { - "type": "datasource", - "id": "mysql", - "name": "MySQL", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "table", - "name": "Table", - "version": "5.0.0" - } - ], "annotations": { "list": [ { @@ -52,8 +16,8 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1523372133566, + "id": 4, + "iteration": 1532620738041, "links": [], "panels": [ { @@ -63,7 +27,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL}", + "datasource": "gdev-mysql", "fill": 2, "gridPos": { "h": 9, @@ -161,7 +125,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL}", + "datasource": "gdev-mysql", "fill": 2, "gridPos": { "h": 18, @@ -251,7 +215,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL}", + "datasource": "gdev-mysql", "fill": 2, "gridPos": { "h": 9, @@ -332,7 +296,7 @@ }, { "columns": [], - "datasource": "${DS_MYSQL}", + "datasource": "gdev-mysql", "fontSize": "100%", "gridPos": { "h": 9, @@ -390,6 +354,7 @@ "schemaVersion": 16, "style": "dark", "tags": [ + "gdev", "fake-data-gen", "mysql" ], @@ -397,8 +362,11 @@ "list": [ { "allValue": null, - "current": {}, - "datasource": "${DS_MYSQL}", + "current": { + 
"text": "America", + "value": "America" + }, + "datasource": "gdev-mysql", "hide": 0, "includeAll": false, "label": "Datacenter", @@ -408,6 +376,7 @@ "query": "SELECT DISTINCT datacenter FROM grafana_metric", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -417,8 +386,11 @@ }, { "allValue": null, - "current": {}, - "datasource": "${DS_MYSQL}", + "current": { + "text": "All", + "value": "$__all" + }, + "datasource": "gdev-mysql", "hide": 0, "includeAll": true, "label": "Hostname", @@ -428,6 +400,7 @@ "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -520,6 +493,7 @@ ], "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -554,7 +528,7 @@ ] }, "timezone": "", - "title": "Grafana Fake Data Gen - MySQL", + "title": "Datasource tests - MySQL", "uid": "DGsCac3kz", "version": 8 } \ No newline at end of file diff --git a/docker/blocks/mysql_tests/dashboard.json b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json similarity index 96% rename from docker/blocks/mysql_tests/dashboard.json rename to devenv/dev-dashboards/datasource_tests_mysql_unittest.json index 53f313315bde..2c20969da122 100644 --- a/docker/blocks/mysql_tests/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_MYSQL_TEST", - "label": "MySQL TEST", - "description": "", - "type": "datasource", - "pluginId": "mysql", - "pluginName": "MySQL" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "5.0.0" - }, - { - "type": "datasource", - "id": "mysql", - "name": "MySQL", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "table", - 
"name": "Table", - "version": "5.0.0" - } - ], "annotations": { "list": [ { @@ -47,7 +11,7 @@ "type": "dashboard" }, { - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "enable": false, "hide": false, "iconColor": "#6ed0e0", @@ -59,7 +23,7 @@ "type": "tags" }, { - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "enable": false, "hide": false, "iconColor": "rgba(255, 96, 96, 1)", @@ -71,7 +35,7 @@ "type": "tags" }, { - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "enable": false, "hide": false, "iconColor": "#7eb26d", @@ -83,7 +47,7 @@ "type": "tags" }, { - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "enable": false, "hide": false, "iconColor": "#1f78c1", @@ -96,16 +60,17 @@ } ] }, + "description": "Run the mysql unit tests to generate the data backing this dashboard", "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1523320712115, + "id": 39, + "iteration": 1532620354037, "links": [], "panels": [ { "columns": [], - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fontSize": "100%", "gridPos": { "h": 4, @@ -152,7 +117,7 @@ }, { "columns": [], - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -206,7 +171,7 @@ }, { "columns": [], - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -260,7 +225,7 @@ }, { "columns": [], - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -314,7 +279,7 @@ }, { "columns": [], - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -371,7 +336,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -454,7 
+419,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -537,7 +502,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -620,7 +585,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -703,7 +668,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -786,7 +751,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -869,7 +834,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -962,7 +927,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1059,7 +1024,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1152,7 +1117,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1237,7 +1202,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1330,7 +1295,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1415,7 +1380,7 @@ "bars": false, "dashLength": 10, 
"dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1508,7 +1473,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1593,7 +1558,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1687,7 +1652,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1774,7 +1739,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1868,7 +1833,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1955,7 +1920,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2049,7 +2014,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2136,7 +2101,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2230,7 +2195,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MYSQL_TEST}", + "datasource": "gdev-mysql-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2316,22 +2281,42 @@ "refresh": false, "schemaVersion": 16, "style": "dark", - "tags": [], + "tags": ["gdev", "mysql"], "templating": { "list": [ { "allValue": "", - "current": {}, - "datasource": "${DS_MYSQL_TEST}", + "current": { + "text": "All", + "value": "$__all" 
+ }, + "datasource": "gdev-mysql-ds-tests", "hide": 0, "includeAll": true, "label": "Metric", "multi": true, "name": "metric", - "options": [], + "options": [ + { + "selected": true, + "text": "All", + "value": "$__all" + }, + { + "selected": false, + "text": "Metric A", + "value": "Metric A" + }, + { + "selected": false, + "text": "Metric B", + "value": "Metric B" + } + ], "query": "SELECT DISTINCT measurement FROM metric_values", - "refresh": 1, + "refresh": 0, "regex": "", + "skipUrlSync": false, "sort": 0, "tagValuesQuery": "", "tags": [], @@ -2384,6 +2369,7 @@ ], "query": "1s,10s,30s,1m,5m,10m", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -2418,7 +2404,7 @@ ] }, "timezone": "", - "title": "MySQL Data Source Test", + "title": "Datasource tests - MySQL (unittest)", "uid": "Hmf8FDkmz", "version": 12 } \ No newline at end of file diff --git a/docker/blocks/postgres/dashboard.json b/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json similarity index 91% rename from docker/blocks/postgres/dashboard.json rename to devenv/dev-dashboards/datasource_tests_postgres_fakedata.json index 77b0ceac6244..1afa6e25df86 100644 --- a/docker/blocks/postgres/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_POSTGRESQL", - "label": "PostgreSQL", - "description": "", - "type": "datasource", - "pluginId": "postgres", - "pluginName": "PostgreSQL" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "" - }, - { - "type": "datasource", - "id": "postgres", - "name": "PostgreSQL", - "version": "1.0.0" - }, - { - "type": "panel", - "id": "table", - "name": "Table", - "version": "" - } - ], "annotations": { "list": [ { @@ -52,8 +16,8 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1518601837383, + "id": 5, 
+ "iteration": 1532620601931, "links": [], "panels": [ { @@ -63,7 +27,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRESQL}", + "datasource": "gdev-postgres", "fill": 2, "gridPos": { "h": 9, @@ -150,14 +114,18 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRESQL}", + "datasource": "gdev-postgres", "fill": 2, "gridPos": { "h": 18, @@ -236,14 +204,18 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRESQL}", + "datasource": "gdev-postgres", "fill": 2, "gridPos": { "h": 9, @@ -316,11 +288,15 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "columns": [], - "datasource": "${DS_POSTGRESQL}", + "datasource": "gdev-postgres", "fontSize": "100%", "gridPos": { "h": 9, @@ -377,6 +353,7 @@ "schemaVersion": 16, "style": "dark", "tags": [ + "gdev", "fake-data-gen", "postgres" ], @@ -384,8 +361,11 @@ "list": [ { "allValue": null, - "current": {}, - "datasource": "${DS_POSTGRESQL}", + "current": { + "text": "America", + "value": "America" + }, + "datasource": "gdev-postgres", "hide": 0, "includeAll": false, "label": "Datacenter", @@ -395,6 +375,7 @@ "query": "SELECT DISTINCT datacenter FROM grafana_metric", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -404,8 +385,11 @@ }, { "allValue": null, - "current": {}, - "datasource": "${DS_POSTGRESQL}", + "current": { + "text": "All", + "value": "$__all" + }, + "datasource": "gdev-postgres", "hide": 0, "includeAll": true, "label": "Hostname", @@ -415,6 +399,7 @@ "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'", "refresh": 1, "regex": "", + "skipUrlSync": false, "sort": 1, 
"tagValuesQuery": "", "tags": [], @@ -507,6 +492,7 @@ ], "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -541,7 +527,7 @@ ] }, "timezone": "", - "title": "Grafana Fake Data Gen - PostgreSQL", + "title": "Datasource tests - Postgres", "uid": "JYola5qzz", - "version": 1 + "version": 4 } \ No newline at end of file diff --git a/docker/blocks/postgres_tests/dashboard.json b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json similarity index 95% rename from docker/blocks/postgres_tests/dashboard.json rename to devenv/dev-dashboards/datasource_tests_postgres_unittest.json index 9efbe90bdfec..d7d5f238e85f 100644 --- a/docker/blocks/postgres_tests/dashboard.json +++ b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json @@ -1,40 +1,4 @@ { - "__inputs": [ - { - "name": "DS_POSTGRES_TEST", - "label": "Postgres TEST", - "description": "", - "type": "datasource", - "pluginId": "postgres", - "pluginName": "PostgreSQL" - } - ], - "__requires": [ - { - "type": "grafana", - "id": "grafana", - "name": "Grafana", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "graph", - "name": "Graph", - "version": "5.0.0" - }, - { - "type": "datasource", - "id": "postgres", - "name": "PostgreSQL", - "version": "5.0.0" - }, - { - "type": "panel", - "id": "table", - "name": "Table", - "version": "5.0.0" - } - ], "annotations": { "list": [ { @@ -47,7 +11,7 @@ "type": "dashboard" }, { - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "enable": false, "hide": false, "iconColor": "#6ed0e0", @@ -59,7 +23,7 @@ "type": "tags" }, { - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "enable": false, "hide": false, "iconColor": "rgba(255, 96, 96, 1)", @@ -71,7 +35,7 @@ "type": "tags" }, { - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "enable": false, "hide": false, "iconColor": "#7eb26d", @@ -83,7 +47,7 @@ 
"type": "tags" }, { - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "enable": false, "hide": false, "iconColor": "#1f78c1", @@ -96,16 +60,17 @@ } ] }, + "description": "Run the postgres unit tests to generate the data backing this dashboard", "editable": true, "gnetId": null, "graphTooltip": 0, - "id": null, - "iteration": 1523320929325, + "id": 38, + "iteration": 1532619575136, "links": [], "panels": [ { "columns": [], - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fontSize": "100%", "gridPos": { "h": 4, @@ -152,7 +117,7 @@ }, { "columns": [], - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -206,7 +171,7 @@ }, { "columns": [], - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -260,7 +225,7 @@ }, { "columns": [], - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -314,7 +279,7 @@ }, { "columns": [], - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fontSize": "100%", "gridPos": { "h": 3, @@ -371,7 +336,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -454,7 +419,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -537,7 +502,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -620,7 +585,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -703,7 +668,7 @@ "bars": true, 
"dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -786,7 +751,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 9, @@ -869,7 +834,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -962,7 +927,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1047,7 +1012,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1140,7 +1105,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1225,7 +1190,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1318,7 +1283,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1403,7 +1368,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1496,7 +1461,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 2, "gridPos": { "h": 8, @@ -1581,7 +1546,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ 
-1675,7 +1640,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1762,7 +1727,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1856,7 +1821,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -1943,7 +1908,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2037,7 +2002,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2124,7 +2089,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2218,7 +2183,7 @@ "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_POSTGRES_TEST}", + "datasource": "gdev-postgres-ds-tests", "fill": 1, "gridPos": { "h": 8, @@ -2304,22 +2269,46 @@ "refresh": false, "schemaVersion": 16, "style": "dark", - "tags": [], + "tags": ["gdev", "postgres"], "templating": { "list": [ { "allValue": null, - "current": {}, - "datasource": "${DS_POSTGRES_TEST}", + "current": { + "selected": true, + "tags": [], + "text": "All", + "value": [ + "$__all" + ] + }, + "datasource": "gdev-postgres-ds-tests", "hide": 0, "includeAll": true, "label": "Metric", "multi": true, "name": "metric", - "options": [], + "options": [ + { + "selected": true, + "text": "All", + "value": "$__all" + }, + { + "selected": false, + "text": "Metric A", + "value": "Metric A" + }, + { + "selected": false, + "text": "Metric B", + "value": "Metric B" + } + ], "query": 
"SELECT DISTINCT measurement FROM metric_values", - "refresh": 1, + "refresh": 0, "regex": "", + "skipUrlSync": false, "sort": 1, "tagValuesQuery": "", "tags": [], @@ -2372,6 +2361,7 @@ ], "query": "1s,10s,30s,1m,5m,10m", "refresh": 2, + "skipUrlSync": false, "type": "interval" } ] @@ -2406,7 +2396,7 @@ ] }, "timezone": "", - "title": "Postgres Data Source Test", + "title": "Datasource tests - Postgres (unittest)", "uid": "vHQdlVziz", - "version": 14 + "version": 17 } \ No newline at end of file From ab8fa0de7443136afeab82fcf8713fddbdc23a48 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Thu, 26 Jul 2018 21:39:02 +0200 Subject: [PATCH 215/263] elasticsearch: support reversed index patterns Now both [index-]pattern and pattern[-index] are supported --- .../elasticsearch/client/index_pattern.go | 35 ++++++++++++++----- .../client/index_pattern_test.go | 27 +++++++++++++- 2 files changed, 53 insertions(+), 9 deletions(-) diff --git a/pkg/tsdb/elasticsearch/client/index_pattern.go b/pkg/tsdb/elasticsearch/client/index_pattern.go index 8391e902ea47..952b5c4f8066 100644 --- a/pkg/tsdb/elasticsearch/client/index_pattern.go +++ b/pkg/tsdb/elasticsearch/client/index_pattern.go @@ -248,13 +248,28 @@ var datePatternReplacements = map[string]string{ func formatDate(t time.Time, pattern string) string { var datePattern string - parts := strings.Split(strings.TrimLeft(pattern, "["), "]") - base := parts[0] - if len(parts) == 2 { - datePattern = parts[1] - } else { - datePattern = base - base = "" + base := "" + ltr := false + + if strings.HasPrefix(pattern, "[") { + parts := strings.Split(strings.TrimLeft(pattern, "["), "]") + base = parts[0] + if len(parts) == 2 { + datePattern = parts[1] + } else { + datePattern = base + base = "" + } + ltr = true + } else if strings.HasSuffix(pattern, "]") { + parts := strings.Split(strings.TrimRight(pattern, "]"), "[") + datePattern = parts[0] + if len(parts) == 2 { + base = parts[1] + } else { + base = "" + } + ltr = false } 
formatted := t.Format(patternToLayout(datePattern)) @@ -293,7 +308,11 @@ func formatDate(t time.Time, pattern string) string { formatted = strings.Replace(formatted, "", fmt.Sprintf("%d", t.Hour()), -1) } - return base + formatted + if ltr { + return base + formatted + } + + return formatted + base } func patternToLayout(pattern string) string { diff --git a/pkg/tsdb/elasticsearch/client/index_pattern_test.go b/pkg/tsdb/elasticsearch/client/index_pattern_test.go index 3bd823d8c87a..ca20b39d5328 100644 --- a/pkg/tsdb/elasticsearch/client/index_pattern_test.go +++ b/pkg/tsdb/elasticsearch/client/index_pattern_test.go @@ -28,29 +28,54 @@ func TestIndexPattern(t *testing.T) { to := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond)) indexPatternScenario(intervalHourly, "[data-]YYYY.MM.DD.HH", tsdb.NewTimeRange(from, to), func(indices []string) { - //So(indices, ShouldHaveLength, 1) + So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05.15.17") }) + indexPatternScenario(intervalHourly, "YYYY.MM.DD.HH[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05.15.17-data") + }) + indexPatternScenario(intervalDaily, "[data-]YYYY.MM.DD", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05.15") }) + indexPatternScenario(intervalDaily, "YYYY.MM.DD[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05.15-data") + }) + indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.20") }) + indexPatternScenario(intervalWeekly, "GGGG.WW[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], 
ShouldEqual, "2018.20-data") + }) + indexPatternScenario(intervalMonthly, "[data-]YYYY.MM", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05") }) + indexPatternScenario(intervalMonthly, "YYYY.MM[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018.05-data") + }) + indexPatternScenario(intervalYearly, "[data-]YYYY", tsdb.NewTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018") }) + + indexPatternScenario(intervalYearly, "YYYY[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "2018-data") + }) }) Convey("Hourly interval", t, func() { From 48e5e65c73eea000bf2b702b8743de0146e29f86 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 27 Jul 2018 10:33:06 +0200 Subject: [PATCH 216/263] changelog: add notes about closing #12731 [skip ci] --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6409f094f657..ad1b63234e9f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -23,6 +23,7 @@ * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek) * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda) * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668) +* **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731) # 5.2.2 (2018-07-25) From 675a031b6c9c367fe27de5e839c1d919ca09021d Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 11:04:01 +0200 Subject: [PATCH 217/263] All except 
one passing --- public/app/plugins/panel/singlestat/module.ts | 5 ++++- public/app/plugins/panel/singlestat/specs/singlestat.jest.ts | 4 ++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/public/app/plugins/panel/singlestat/module.ts b/public/app/plugins/panel/singlestat/module.ts index 7fafb5902d13..b63182141c19 100644 --- a/public/app/plugins/panel/singlestat/module.ts +++ b/public/app/plugins/panel/singlestat/module.ts @@ -310,11 +310,14 @@ class SingleStatCtrl extends MetricsPanelCtrl { data.valueRounded = data.value; data.valueFormatted = formatFunc(data.value, this.dashboard.isTimezoneUtc()); } else { - console.log(lastPoint, lastValue); + // console.log(lastPoint, lastValue); + // console.log(this.panel.valueName); + // console.log(this.panel); data.value = this.series[0].stats[this.panel.valueName]; data.flotpairs = this.series[0].flotpairs; let decimalInfo = this.getDecimalsForValue(data.value); + console.log(decimalInfo); let formatFunc = kbn.valueFormats[this.panel.format]; data.valueFormatted = formatFunc(data.value, decimalInfo.decimals, decimalInfo.scaledDecimals); data.valueRounded = kbn.roundValue(data.value, decimalInfo.decimals); diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 7b89f86250c4..798298415a9c 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -192,6 +192,8 @@ describe('SingleStatCtrl', function() { ) { ctx.setup(function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[99.999, 1], [99.99999, 2]] }]; + ctx.ctrl.panel.valueName = 'avg'; + ctx.ctrl.panel.format = 'none'; }); it('Should be rounded', function() { @@ -259,7 +261,9 @@ describe('SingleStatCtrl', function() { singleStatScenario('with default values', function(ctx) { ctx.setup(function() { ctx.data = tableData; + ctx.ctrl.panel = {}; ctx.ctrl.panel.tableColumn = 'mean'; + 
ctx.ctrl.panel.format = 'none'; }); it('Should use first rows value as default main value', function() { From 47da3e3ae83f36207cedfa26e9b5d51ca21b112f Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 11:28:16 +0200 Subject: [PATCH 218/263] All tests passing --- public/app/plugins/panel/singlestat/module.ts | 4 ---- public/app/plugins/panel/singlestat/specs/singlestat.jest.ts | 2 ++ 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/public/app/plugins/panel/singlestat/module.ts b/public/app/plugins/panel/singlestat/module.ts index b63182141c19..ebd2628b0864 100644 --- a/public/app/plugins/panel/singlestat/module.ts +++ b/public/app/plugins/panel/singlestat/module.ts @@ -310,14 +310,10 @@ class SingleStatCtrl extends MetricsPanelCtrl { data.valueRounded = data.value; data.valueFormatted = formatFunc(data.value, this.dashboard.isTimezoneUtc()); } else { - // console.log(lastPoint, lastValue); - // console.log(this.panel.valueName); - // console.log(this.panel); data.value = this.series[0].stats[this.panel.valueName]; data.flotpairs = this.series[0].flotpairs; let decimalInfo = this.getDecimalsForValue(data.value); - console.log(decimalInfo); let formatFunc = kbn.valueFormats[this.panel.format]; data.valueFormatted = formatFunc(data.value, decimalInfo.decimals, decimalInfo.scaledDecimals); data.valueRounded = kbn.roundValue(data.value, decimalInfo.decimals); diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 798298415a9c..552ac2412d65 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -293,6 +293,7 @@ describe('SingleStatCtrl', function() { ctx.setup(function() { ctx.data = tableData; ctx.data[0].rows[0] = [1492759673649, 'ignore1', 99.99999, 'ignore2']; + ctx.ctrl.panel.mappingType = 0; ctx.ctrl.panel.tableColumn = 'mean'; }); @@ -310,6 +311,7 @@ 
describe('SingleStatCtrl', function() { ctx.setup(function() { ctx.data = tableData; ctx.data[0].rows[0] = [1492759673649, 'ignore1', 9.9, 'ignore2']; + ctx.ctrl.panel.mappingType = 2; ctx.ctrl.panel.tableColumn = 'mean'; ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }]; }); From 3d21e42aac715c28fe3325bd3ce9f7a00cb39312 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 11:30:37 +0200 Subject: [PATCH 219/263] Remove Karma file --- .../singlestat/specs/singlestat_specs.ts | 362 ------------------ 1 file changed, 362 deletions(-) delete mode 100644 public/app/plugins/panel/singlestat/specs/singlestat_specs.ts diff --git a/public/app/plugins/panel/singlestat/specs/singlestat_specs.ts b/public/app/plugins/panel/singlestat/specs/singlestat_specs.ts deleted file mode 100644 index 217ec5ee04c2..000000000000 --- a/public/app/plugins/panel/singlestat/specs/singlestat_specs.ts +++ /dev/null @@ -1,362 +0,0 @@ -import { describe, beforeEach, afterEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - -import helpers from 'test/specs/helpers'; -import { SingleStatCtrl } from '../module'; -import moment from 'moment'; - -describe('SingleStatCtrl', function() { - var ctx = new helpers.ControllerTestContext(); - var epoch = 1505826363746; - var clock; - - function singleStatScenario(desc, func) { - describe(desc, function() { - ctx.setup = function(setupFunc) { - beforeEach(angularMocks.module('grafana.services')); - beforeEach(angularMocks.module('grafana.controllers')); - beforeEach( - angularMocks.module(function($compileProvider) { - $compileProvider.preAssignBindingsEnabled(true); - }) - ); - - beforeEach(ctx.providePhase()); - beforeEach(ctx.createPanelController(SingleStatCtrl)); - - beforeEach(function() { - setupFunc(); - ctx.ctrl.onDataReceived(ctx.data); - ctx.data = ctx.ctrl.data; - }); - }; - - func(ctx); - }); - } - - singleStatScenario('with defaults', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', 
datapoints: [[10, 1], [20, 2]] }]; - }); - - it('Should use series avg as default main value', function() { - expect(ctx.data.value).to.be(15); - expect(ctx.data.valueRounded).to.be(15); - }); - - it('should set formatted falue', function() { - expect(ctx.data.valueFormatted).to.be('15'); - }); - }); - - singleStatScenario('showing serie name instead of value', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 1], [20, 2]] }]; - ctx.ctrl.panel.valueName = 'name'; - }); - - it('Should use series avg as default main value', function() { - expect(ctx.data.value).to.be(0); - expect(ctx.data.valueRounded).to.be(0); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be('test.cpu1'); - }); - }); - - singleStatScenario('showing last iso time instead of value', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeAsIso'; - }); - - it('Should use time instead of value', function() { - expect(ctx.data.value).to.be(1505634997920); - expect(ctx.data.valueRounded).to.be(1505634997920); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be(moment(1505634997920).format('YYYY-MM-DD HH:mm:ss')); - }); - }); - - singleStatScenario('showing last iso time instead of value (in UTC)', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeAsIso'; - ctx.setIsUtc(true); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be(moment.utc(1505634997920).format('YYYY-MM-DD HH:mm:ss')); - }); - }); - - singleStatScenario('showing last us time instead of value', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 
'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeAsUS'; - }); - - it('Should use time instead of value', function() { - expect(ctx.data.value).to.be(1505634997920); - expect(ctx.data.valueRounded).to.be(1505634997920); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be(moment(1505634997920).format('MM/DD/YYYY h:mm:ss a')); - }); - }); - - singleStatScenario('showing last us time instead of value (in UTC)', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeAsUS'; - ctx.setIsUtc(true); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be(moment.utc(1505634997920).format('MM/DD/YYYY h:mm:ss a')); - }); - }); - - singleStatScenario('showing last time from now instead of value', function(ctx) { - beforeEach(() => { - clock = sinon.useFakeTimers(epoch); - }); - - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeFromNow'; - }); - - it('Should use time instead of value', function() { - expect(ctx.data.value).to.be(1505634997920); - expect(ctx.data.valueRounded).to.be(1505634997920); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be('2 days ago'); - }); - - afterEach(() => { - clock.restore(); - }); - }); - - singleStatScenario('showing last time from now instead of value (in UTC)', function(ctx) { - beforeEach(() => { - clock = sinon.useFakeTimers(epoch); - }); - - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; - ctx.ctrl.panel.valueName = 'last_time'; - ctx.ctrl.panel.format = 'dateTimeFromNow'; - 
ctx.setIsUtc(true); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be('2 days ago'); - }); - - afterEach(() => { - clock.restore(); - }); - }); - - singleStatScenario('MainValue should use same number for decimals as displayed when checking thresholds', function( - ctx - ) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[99.999, 1], [99.99999, 2]] }]; - }); - - it('Should be rounded', function() { - expect(ctx.data.value).to.be(99.999495); - expect(ctx.data.valueRounded).to.be(100); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be('100'); - }); - }); - - singleStatScenario('When value to text mapping is specified', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[9.9, 1]] }]; - ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }]; - }); - - it('value should remain', function() { - expect(ctx.data.value).to.be(9.9); - }); - - it('round should be rounded up', function() { - expect(ctx.data.valueRounded).to.be(10); - }); - - it('Should replace value with text', function() { - expect(ctx.data.valueFormatted).to.be('OK'); - }); - }); - - singleStatScenario('When range to text mapping is specified for first range', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[41, 50]] }]; - ctx.ctrl.panel.mappingType = 2; - ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; - }); - - it('Should replace value with text OK', function() { - expect(ctx.data.valueFormatted).to.be('OK'); - }); - }); - - singleStatScenario('When range to text mapping is specified for other ranges', function(ctx) { - ctx.setup(function() { - ctx.data = [{ target: 'test.cpu1', datapoints: [[65, 75]] }]; - ctx.ctrl.panel.mappingType = 2; - ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' 
}]; - }); - - it('Should replace value with text NOT OK', function() { - expect(ctx.data.valueFormatted).to.be('NOT OK'); - }); - }); - - describe('When table data', function() { - const tableData = [ - { - columns: [{ text: 'Time', type: 'time' }, { text: 'test1' }, { text: 'mean' }, { text: 'test2' }], - rows: [[1492759673649, 'ignore1', 15, 'ignore2']], - type: 'table', - }, - ]; - - singleStatScenario('with default values', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.ctrl.panel.tableColumn = 'mean'; - }); - - it('Should use first rows value as default main value', function() { - expect(ctx.data.value).to.be(15); - expect(ctx.data.valueRounded).to.be(15); - }); - - it('should set formatted value', function() { - expect(ctx.data.valueFormatted).to.be('15'); - }); - }); - - singleStatScenario('When table data has multiple columns', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.ctrl.panel.tableColumn = ''; - }); - - it('Should set column to first column that is not time', function() { - expect(ctx.ctrl.panel.tableColumn).to.be('test1'); - }); - }); - - singleStatScenario('MainValue should use same number for decimals as displayed when checking thresholds', function( - ctx - ) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 99.99999, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'mean'; - }); - - it('Should be rounded', function() { - expect(ctx.data.value).to.be(99.99999); - expect(ctx.data.valueRounded).to.be(100); - }); - - it('should set formatted falue', function() { - expect(ctx.data.valueFormatted).to.be('100'); - }); - }); - - singleStatScenario('When value to text mapping is specified', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 9.9, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'mean'; - ctx.ctrl.panel.valueMaps = [{ value: '10', text: 'OK' }]; - }); - - it('value should remain', 
function() { - expect(ctx.data.value).to.be(9.9); - }); - - it('round should be rounded up', function() { - expect(ctx.data.valueRounded).to.be(10); - }); - - it('Should replace value with text', function() { - expect(ctx.data.valueFormatted).to.be('OK'); - }); - }); - - singleStatScenario('When range to text mapping is specified for first range', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 41, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'mean'; - ctx.ctrl.panel.mappingType = 2; - ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; - }); - - it('Should replace value with text OK', function() { - expect(ctx.data.valueFormatted).to.be('OK'); - }); - }); - - singleStatScenario('When range to text mapping is specified for other ranges', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 65, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'mean'; - ctx.ctrl.panel.mappingType = 2; - ctx.ctrl.panel.rangeMaps = [{ from: '10', to: '50', text: 'OK' }, { from: '51', to: '100', text: 'NOT OK' }]; - }); - - it('Should replace value with text NOT OK', function() { - expect(ctx.data.valueFormatted).to.be('NOT OK'); - }); - }); - - singleStatScenario('When value is string', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 65, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'test1'; - }); - - it('Should replace value with text NOT OK', function() { - expect(ctx.data.valueFormatted).to.be('ignore1'); - }); - }); - - singleStatScenario('When value is zero', function(ctx) { - ctx.setup(function() { - ctx.data = tableData; - ctx.data[0].rows[0] = [1492759673649, 'ignore1', 0, 'ignore2']; - ctx.ctrl.panel.tableColumn = 'mean'; - }); - - it('Should return zero', function() { - expect(ctx.data.value).to.be(0); - }); - }); - }); -}); From 
bff7a293562125dc8423919f23a871d7141fa189 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 11:34:14 +0200 Subject: [PATCH 220/263] Cleanup --- .../panel/singlestat/specs/singlestat.jest.ts | 26 ------------------- 1 file changed, 26 deletions(-) diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 552ac2412d65..7e8915ca5375 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -1,6 +1,3 @@ -// import { describe, beforeEach, afterEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - -// import helpers from 'test/specs/helpers'; import { SingleStatCtrl } from '../module'; import moment from 'moment'; @@ -30,17 +27,6 @@ describe('SingleStatCtrl', function() { function singleStatScenario(desc, func) { describe(desc, function() { ctx.setup = function(setupFunc) { - // beforeEach(angularMocks.module('grafana.services')); - // beforeEach(angularMocks.module('grafana.controllers')); - // beforeEach( - // angularMocks.module(function($compileProvider) { - // $compileProvider.preAssignBindingsEnabled(true); - // }) - // ); - - // beforeEach(ctx.providePhase()); - // beforeEach(ctx.createPanelController(SingleStatCtrl)); - beforeEach(function() { ctx.ctrl = new SingleStatCtrl($scope, $injector, {}); setupFunc(); @@ -107,7 +93,6 @@ describe('SingleStatCtrl', function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 5000]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsIso'; - // ctx.setIsUtc(true); ctx.ctrl.dashboard.isTimezoneUtc = () => true; }); @@ -139,7 +124,6 @@ describe('SingleStatCtrl', function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 5000]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeAsUS'; - // ctx.setIsUtc(true); ctx.ctrl.dashboard.isTimezoneUtc = () 
=> true; }); @@ -149,11 +133,6 @@ describe('SingleStatCtrl', function() { }); singleStatScenario('showing last time from now instead of value', function(ctx) { - beforeEach(() => { - // clock = sinon.useFakeTimers(epoch); - //jest.useFakeTimers(); - }); - ctx.setup(function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; ctx.ctrl.panel.valueName = 'last_time'; @@ -168,10 +147,6 @@ describe('SingleStatCtrl', function() { it('should set formatted value', function() { expect(ctx.data.valueFormatted).toBe('2 days ago'); }); - - afterEach(() => { - // jest.clearAllTimers(); - }); }); singleStatScenario('showing last time from now instead of value (in UTC)', function(ctx) { @@ -179,7 +154,6 @@ describe('SingleStatCtrl', function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; ctx.ctrl.panel.valueName = 'last_time'; ctx.ctrl.panel.format = 'dateTimeFromNow'; - // ctx.setIsUtc(true); }); it('should set formatted value', function() { From e43feb7bfa0551125f82dbcf6503564227f091a1 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 27 Jul 2018 13:21:40 +0200 Subject: [PATCH 221/263] use const for rowlimit in sql engine --- pkg/tsdb/sql_engine.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index 9321e8912dc5..27ed37923a36 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -100,6 +100,8 @@ var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransfo return &queryEndpoint, nil } +const rowLimit = 1000000 + // Query is the main function for the SqlQueryEndpoint func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *TsdbQuery) (*Response, error) { result := &Response{ @@ -164,7 +166,6 @@ func (e *sqlQueryEndpoint) transformToTable(query *Query, rows *core.Rows, resul return err } - rowLimit := 1000000 rowCount := 0 timeIndex := -1 @@ -225,7 +226,6 @@ 
func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, return err } - rowLimit := 1000000 rowCount := 0 timeIndex := -1 metricIndex := -1 From 67c613a45a3ab3b15b587e6999e83a63d52a1582 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 13:29:57 +0200 Subject: [PATCH 222/263] Begin conversion --- public/app/core/specs/backend_srv.jest.ts | 39 +++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 public/app/core/specs/backend_srv.jest.ts diff --git a/public/app/core/specs/backend_srv.jest.ts b/public/app/core/specs/backend_srv.jest.ts new file mode 100644 index 000000000000..6281f3814ce6 --- /dev/null +++ b/public/app/core/specs/backend_srv.jest.ts @@ -0,0 +1,39 @@ +import { BackendSrv } from 'app/core/services/backend_srv'; +jest.mock('app/core/store'); + +describe('backend_srv', function() { + let _httpBackend = options => { + if (options.method === 'GET' && options.url === 'gateway-error') { + return Promise.reject({ status: 502 }); + } else if (options.method === 'POST') { + // return Promise.resolve({}); + } + return Promise.resolve({}); + }; + + let _backendSrv = new BackendSrv(_httpBackend, {}, {}, {}, {}); + + // beforeEach(angularMocks.module('grafana.core')); + // beforeEach(angularMocks.module('grafana.services')); + // beforeEach( + // angularMocks.inject(function($httpBackend, $http, backendSrv) { + // _httpBackend = $httpBackend; + // _backendSrv = backendSrv; + // }) + // ); + + describe('when handling errors', function() { + it('should return the http status code', function(done) { + // _httpBackend.whenGET('gateway-error').respond(502); + _backendSrv + .datasourceRequest({ + url: 'gateway-error', + }) + .catch(function(err) { + expect(err.status).toBe(502); + done(); + }); + // _httpBackend.flush(); + }); + }); +}); From b4ac3f2379e675439f571c308eb36581d4a39984 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 27 Jul 2018 13:33:50 +0200 Subject: [PATCH 223/263] update devenv 
datasources and dashboards for sql datasources --- devenv/dev-dashboards/datasource_tests_mssql_fakedata.json | 1 - devenv/dev-dashboards/datasource_tests_mssql_unittest.json | 1 - devenv/dev-dashboards/datasource_tests_mysql_fakedata.json | 1 - devenv/dev-dashboards/datasource_tests_mysql_unittest.json | 1 - devenv/dev-dashboards/datasource_tests_postgres_fakedata.json | 1 - devenv/dev-dashboards/datasource_tests_postgres_unittest.json | 1 - 6 files changed, 6 deletions(-) diff --git a/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json b/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json index 4350b5e44a82..e810a686134b 100644 --- a/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json +++ b/devenv/dev-dashboards/datasource_tests_mssql_fakedata.json @@ -16,7 +16,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 203, "iteration": 1532618661457, "links": [], "panels": [ diff --git a/devenv/dev-dashboards/datasource_tests_mssql_unittest.json b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json index 5c8eb8243a30..d47cfb0ad6ea 100644 --- a/devenv/dev-dashboards/datasource_tests_mssql_unittest.json +++ b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json @@ -64,7 +64,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 35, "iteration": 1532618879985, "links": [], "panels": [ diff --git a/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json b/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json index cef8fd4783f8..ebeb452fc4c7 100644 --- a/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json +++ b/devenv/dev-dashboards/datasource_tests_mysql_fakedata.json @@ -16,7 +16,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 4, "iteration": 1532620738041, "links": [], "panels": [ diff --git a/devenv/dev-dashboards/datasource_tests_mysql_unittest.json b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json index 2c20969da122..326114ec8ff6 100644 --- 
a/devenv/dev-dashboards/datasource_tests_mysql_unittest.json +++ b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json @@ -64,7 +64,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 39, "iteration": 1532620354037, "links": [], "panels": [ diff --git a/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json b/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json index 1afa6e25df86..508cae86bc3a 100644 --- a/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json +++ b/devenv/dev-dashboards/datasource_tests_postgres_fakedata.json @@ -16,7 +16,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 5, "iteration": 1532620601931, "links": [], "panels": [ diff --git a/devenv/dev-dashboards/datasource_tests_postgres_unittest.json b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json index d7d5f238e85f..85151089b7f3 100644 --- a/devenv/dev-dashboards/datasource_tests_postgres_unittest.json +++ b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json @@ -64,7 +64,6 @@ "editable": true, "gnetId": null, "graphTooltip": 0, - "id": 38, "iteration": 1532619575136, "links": [], "panels": [ From 55111c801fbdc74687d74136dc73daf2aa29131c Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 13:41:07 +0200 Subject: [PATCH 224/263] Update test for local time --- .../plugins/panel/singlestat/specs/singlestat.jest.ts | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 7e8915ca5375..dd02b5c169c5 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -23,6 +23,9 @@ describe('SingleStatCtrl', function() { SingleStatCtrl.prototype.dashboard = { isTimezoneUtc: jest.fn(() => true), }; + SingleStatCtrl.prototype.events = { + on: () => {}, + }; function 
singleStatScenario(desc, func) { describe(desc, function() { @@ -84,7 +87,7 @@ describe('SingleStatCtrl', function() { }); it('should set formatted value', function() { - expect(ctx.data.valueFormatted).toBe('2017-09-17 09:56:37'); + expect(moment(ctx.data.valueFormatted).isSame('2017-09-17 09:56:37')).toBe(true); }); }); @@ -235,7 +238,9 @@ describe('SingleStatCtrl', function() { singleStatScenario('with default values', function(ctx) { ctx.setup(function() { ctx.data = tableData; - ctx.ctrl.panel = {}; + ctx.ctrl.panel = { + emit: () => {}, + }; ctx.ctrl.panel.tableColumn = 'mean'; ctx.ctrl.panel.format = 'none'; }); From 971e52ecc98126788066f0452aeaa7bf93f7baf2 Mon Sep 17 00:00:00 2001 From: Patrick O'Carroll Date: Fri, 27 Jul 2018 13:48:14 +0200 Subject: [PATCH 225/263] removed unused class from the deletebutton pr --- public/app/containers/Teams/TeamList.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx index b86763d87998..31406250cb3f 100644 --- a/public/app/containers/Teams/TeamList.tsx +++ b/public/app/containers/Teams/TeamList.tsx @@ -88,7 +88,7 @@ export class TeamList extends React.Component {
    -
    +
    From 4e6168f3a331e5701e279305774413eca87499d4 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 14:22:48 +0200 Subject: [PATCH 226/263] Add async/await --- public/app/core/specs/backend_srv.jest.ts | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/public/app/core/specs/backend_srv.jest.ts b/public/app/core/specs/backend_srv.jest.ts index 6281f3814ce6..2d62716622a7 100644 --- a/public/app/core/specs/backend_srv.jest.ts +++ b/public/app/core/specs/backend_srv.jest.ts @@ -3,10 +3,9 @@ jest.mock('app/core/store'); describe('backend_srv', function() { let _httpBackend = options => { - if (options.method === 'GET' && options.url === 'gateway-error') { + console.log(options); + if (options.url === 'gateway-error') { return Promise.reject({ status: 502 }); - } else if (options.method === 'POST') { - // return Promise.resolve({}); } return Promise.resolve({}); }; @@ -22,17 +21,14 @@ describe('backend_srv', function() { // }) // ); - describe('when handling errors', function() { - it('should return the http status code', function(done) { + describe('when handling errors', () => { + it('should return the http status code', async () => { // _httpBackend.whenGET('gateway-error').respond(502); - _backendSrv - .datasourceRequest({ - url: 'gateway-error', - }) - .catch(function(err) { - expect(err.status).toBe(502); - done(); - }); + let res = await _backendSrv.datasourceRequest({ + url: 'gateway-error', + }); + console.log(res); + expect(res.status).toBe(502); // _httpBackend.flush(); }); }); From 2db4a54f75c7c1bd8a3a70ea0d4be50f88ab0552 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 14:40:56 +0200 Subject: [PATCH 227/263] Fix test --- public/app/plugins/panel/singlestat/specs/singlestat.jest.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts index 
dd02b5c169c5..0480d0be5c30 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat.jest.ts @@ -87,7 +87,7 @@ describe('SingleStatCtrl', function() { }); it('should set formatted value', function() { - expect(moment(ctx.data.valueFormatted).isSame('2017-09-17 09:56:37')).toBe(true); + expect(moment(ctx.data.valueFormatted).valueOf()).toBe(1505634997000); }); }); From 766c23a1eb86d6ba47b2d61d9b72153089b73264 Mon Sep 17 00:00:00 2001 From: Tobias Skarhed Date: Fri, 27 Jul 2018 15:16:19 +0200 Subject: [PATCH 228/263] Fix emit errors --- public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts index 3ebcf6cdf313..a0c7dd0ab9ca 100644 --- a/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts +++ b/public/app/plugins/panel/graph/specs/graph_ctrl.jest.ts @@ -34,6 +34,9 @@ describe('GraphCtrl', () => { beforeEach(() => { ctx.ctrl = new GraphCtrl(scope, injector, {}); + ctx.ctrl.events = { + emit: () => {}, + }; ctx.ctrl.annotationsPromise = Promise.resolve({}); ctx.ctrl.updateTimeRange(); }); From b28a362635876bc321063127f0e3ddf3d599cb79 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sat, 21 Jul 2018 11:04:05 +0200 Subject: [PATCH 229/263] Use metric column as prefix If multiple value columns are returned and a metric column is returned aswell the metric column will be used as prefix for the series name --- docs/sources/features/datasources/postgres.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sources/features/datasources/postgres.md b/docs/sources/features/datasources/postgres.md index f9af60a2efc1..f3e52ed6652a 100644 --- a/docs/sources/features/datasources/postgres.md +++ b/docs/sources/features/datasources/postgres.md @@ -101,7 +101,7 @@ The resulting table panel: If you set `Format as` to `Time 
series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch. Any column except `time` and `metric` is treated as a value column. -You may return a column named `metric` that is used as metric name for the value column. +You may return a column named `metric` that is used as metric name for the value column. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name. **Example with `metric` column:** From f9d6c88a556142791bc6ba0af96ca46dd0dac037 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 24 Jul 2018 18:31:47 +0200 Subject: [PATCH 230/263] add testcase for metric column as prefix --- pkg/tsdb/postgres/postgres_test.go | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go index 089829bf5901..c7787929a9d5 100644 --- a/pkg/tsdb/postgres/postgres_test.go +++ b/pkg/tsdb/postgres/postgres_test.go @@ -568,6 +568,31 @@ func TestPostgres(t *testing.T) { So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one") }) + Convey("When doing a metric query with metric column and multiple value columns", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT $__timeEpoch(time), measurement as metric, "valueOne", "valueTwo" FROM metric_values ORDER BY 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 4) + So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne") + So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo") + So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne") + 
So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo") + }) + Convey("When doing a metric query grouping by time should return correct series", func() { query := &tsdb.TsdbQuery{ Queries: []*tsdb.Query{ From 7905c29875a29d230af476e41cb070b13bc9de73 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 24 Jul 2018 19:25:48 +0200 Subject: [PATCH 231/263] adjust metric prefix code to sql engine refactor --- pkg/tsdb/sql_engine.go | 15 ++++++++++++++- .../postgres/partials/query.editor.html | 5 ++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index 27ed37923a36..027f37fc2433 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -229,6 +229,8 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, rowCount := 0 timeIndex := -1 metricIndex := -1 + metricPrefix := false + var metricPrefixValue string // check columns of resultset: a column named time is mandatory // the first text column is treated as metric name unless a column named metric is present @@ -256,6 +258,11 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, } } + // use metric column as prefix with multiple value columns + if metricIndex != -1 && len(columnNames) > 3 { + metricPrefix = true + } + if timeIndex == -1 { return fmt.Errorf("Found no column named %s", strings.Join(e.timeColumnNames, " or ")) } @@ -301,7 +308,11 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, if metricIndex >= 0 { if columnValue, ok := values[metricIndex].(string); ok { - metric = columnValue + if metricPrefix { + metricPrefixValue = columnValue + } else { + metric = columnValue + } } else { return fmt.Errorf("Column metric must be of type %s. 
metric column name: %s type: %s but datatype is %T", strings.Join(e.metricColumnTypes, ", "), columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex]) } @@ -318,6 +329,8 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows, if metricIndex == -1 { metric = col + } else if metricPrefix { + metric = metricPrefixValue + " " + col } series, exist := pointsBySeries[metric] diff --git a/public/app/plugins/datasource/postgres/partials/query.editor.html b/public/app/plugins/datasource/postgres/partials/query.editor.html index 26392c17356c..b7c12471f521 100644 --- a/public/app/plugins/datasource/postgres/partials/query.editor.html +++ b/public/app/plugins/datasource/postgres/partials/query.editor.html @@ -40,7 +40,10 @@
    Time series:
     - return column named time (UTC in seconds or timestamp)
     - return column(s) with numeric datatype as values
    -- (Optional: return column named metric to represent the series name. If no column named metric is found the column name of the value column is used as series name)
    +Optional: 
    +  - return column named metric to represent the series name. 
    +  - If multiple value columns are returned the metric column is used as prefix. 
    +  - If no column named metric is found the column name of the value column is used as series name
     
     Table:
     - return any set of columns
    
    From 2f6b302375bbe7c562e6df09760f1f4b495b2715 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Fri, 27 Jul 2018 15:51:56 +0200
    Subject: [PATCH 232/263] Test passing. Remove Karma
    
    ---
     public/app/core/specs/backend_srv.jest.ts  | 23 +++++-----------
     public/app/core/specs/backend_srv_specs.ts | 31 ----------------------
     2 files changed, 7 insertions(+), 47 deletions(-)
     delete mode 100644 public/app/core/specs/backend_srv_specs.ts
    
    diff --git a/public/app/core/specs/backend_srv.jest.ts b/public/app/core/specs/backend_srv.jest.ts
    index 2d62716622a7..c65464aa875b 100644
    --- a/public/app/core/specs/backend_srv.jest.ts
    +++ b/public/app/core/specs/backend_srv.jest.ts
    @@ -12,24 +12,15 @@ describe('backend_srv', function() {
     
       let _backendSrv = new BackendSrv(_httpBackend, {}, {}, {}, {});
     
    -  //   beforeEach(angularMocks.module('grafana.core'));
    -  //   beforeEach(angularMocks.module('grafana.services'));
    -  //   beforeEach(
    -  //     angularMocks.inject(function($httpBackend, $http, backendSrv) {
    -  //       _httpBackend = $httpBackend;
    -  //       _backendSrv = backendSrv;
    -  //     })
    -  //   );
    -
       describe('when handling errors', () => {
         it('should return the http status code', async () => {
    -      //   _httpBackend.whenGET('gateway-error').respond(502);
    -      let res = await _backendSrv.datasourceRequest({
    -        url: 'gateway-error',
    -      });
    -      console.log(res);
    -      expect(res.status).toBe(502);
    -      //   _httpBackend.flush();
    +      try {
    +        await _backendSrv.datasourceRequest({
    +          url: 'gateway-error',
    +        });
    +      } catch (err) {
    +        expect(err.status).toBe(502);
    +      }
         });
       });
     });
    diff --git a/public/app/core/specs/backend_srv_specs.ts b/public/app/core/specs/backend_srv_specs.ts
    deleted file mode 100644
    index 74b058b98c82..000000000000
    --- a/public/app/core/specs/backend_srv_specs.ts
    +++ /dev/null
    @@ -1,31 +0,0 @@
    -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
    -import 'app/core/services/backend_srv';
    -
    -describe('backend_srv', function() {
    -  var _backendSrv;
    -  var _httpBackend;
    -
    -  beforeEach(angularMocks.module('grafana.core'));
    -  beforeEach(angularMocks.module('grafana.services'));
    -  beforeEach(
    -    angularMocks.inject(function($httpBackend, $http, backendSrv) {
    -      _httpBackend = $httpBackend;
    -      _backendSrv = backendSrv;
    -    })
    -  );
    -
    -  describe('when handling errors', function() {
    -    it('should return the http status code', function(done) {
    -      _httpBackend.whenGET('gateway-error').respond(502);
    -      _backendSrv
    -        .datasourceRequest({
    -          url: 'gateway-error',
    -        })
    -        .catch(function(err) {
    -          expect(err.status).to.be(502);
    -          done();
    -        });
    -      _httpBackend.flush();
    -    });
    -  });
    -});
    
    From c11d0f5cc6289b708d1e0d7c072de7eb6b1b8422 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Fri, 27 Jul 2018 15:52:22 +0200
    Subject: [PATCH 233/263] Remove lo
    
    ---
     public/app/core/specs/backend_srv.jest.ts | 1 -
     1 file changed, 1 deletion(-)
    
    diff --git a/public/app/core/specs/backend_srv.jest.ts b/public/app/core/specs/backend_srv.jest.ts
    index c65464aa875b..b19bd1177665 100644
    --- a/public/app/core/specs/backend_srv.jest.ts
    +++ b/public/app/core/specs/backend_srv.jest.ts
    @@ -3,7 +3,6 @@ jest.mock('app/core/store');
     
     describe('backend_srv', function() {
       let _httpBackend = options => {
    -    console.log(options);
         if (options.url === 'gateway-error') {
           return Promise.reject({ status: 502 });
         }
    
    From 895b4b40eee4af0ee79b0935856ff1c532ebeb94 Mon Sep 17 00:00:00 2001
    From: Worty <6840978+Worty@users.noreply.github.com>
    Date: Fri, 27 Jul 2018 16:26:04 +0200
    Subject: [PATCH 234/263] correct volume unit
    
    ---
     public/app/core/specs/kbn.jest.ts |  2 +-
     public/app/core/utils/kbn.ts      | 36 +++++++++++++++----------------
     2 files changed, 19 insertions(+), 19 deletions(-)
    
    diff --git a/public/app/core/specs/kbn.jest.ts b/public/app/core/specs/kbn.jest.ts
    index 689450680431..9c62990615c0 100644
    --- a/public/app/core/specs/kbn.jest.ts
    +++ b/public/app/core/specs/kbn.jest.ts
    @@ -402,7 +402,7 @@ describe('duration', function() {
     describe('volume', function() {
       it('1000m3', function() {
         var str = kbn.valueFormats['m3'](1000, 1, null);
    -    expect(str).toBe('1000.0 m3');
    +    expect(str).toBe('1000.0 m³');
       });
     });
     
    diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts
    index 4fc4829811f1..74ef2a9e8740 100644
    --- a/public/app/core/utils/kbn.ts
    +++ b/public/app/core/utils/kbn.ts
    @@ -572,9 +572,9 @@ kbn.valueFormats.accG = kbn.formatBuilders.fixedUnit('g');
     // Volume
     kbn.valueFormats.litre = kbn.formatBuilders.decimalSIPrefix('L');
     kbn.valueFormats.mlitre = kbn.formatBuilders.decimalSIPrefix('L', -1);
    -kbn.valueFormats.m3 = kbn.formatBuilders.fixedUnit('m3');
    -kbn.valueFormats.Nm3 = kbn.formatBuilders.fixedUnit('Nm3');
    -kbn.valueFormats.dm3 = kbn.formatBuilders.fixedUnit('dm3');
    +kbn.valueFormats.m3 = kbn.formatBuilders.fixedUnit('m³');
    +kbn.valueFormats.Nm3 = kbn.formatBuilders.fixedUnit('Nm³');
    +kbn.valueFormats.dm3 = kbn.formatBuilders.fixedUnit('dm³');
     kbn.valueFormats.gallons = kbn.formatBuilders.fixedUnit('gal');
     
     // Flow
    @@ -605,14 +605,14 @@ kbn.valueFormats.radsvh = kbn.formatBuilders.decimalSIPrefix('Sv/h');
     // Concentration
     kbn.valueFormats.ppm = kbn.formatBuilders.fixedUnit('ppm');
     kbn.valueFormats.conppb = kbn.formatBuilders.fixedUnit('ppb');
    -kbn.valueFormats.conngm3 = kbn.formatBuilders.fixedUnit('ng/m3');
    -kbn.valueFormats.conngNm3 = kbn.formatBuilders.fixedUnit('ng/Nm3');
    -kbn.valueFormats.conμgm3 = kbn.formatBuilders.fixedUnit('μg/m3');
    -kbn.valueFormats.conμgNm3 = kbn.formatBuilders.fixedUnit('μg/Nm3');
    -kbn.valueFormats.conmgm3 = kbn.formatBuilders.fixedUnit('mg/m3');
    -kbn.valueFormats.conmgNm3 = kbn.formatBuilders.fixedUnit('mg/Nm3');
    -kbn.valueFormats.congm3 = kbn.formatBuilders.fixedUnit('g/m3');
    -kbn.valueFormats.congNm3 = kbn.formatBuilders.fixedUnit('g/Nm3');
    +kbn.valueFormats.conngm3 = kbn.formatBuilders.fixedUnit('ng/m³');
    +kbn.valueFormats.conngNm3 = kbn.formatBuilders.fixedUnit('ng/Nm³');
    +kbn.valueFormats.conμgm3 = kbn.formatBuilders.fixedUnit('μg/m³');
    +kbn.valueFormats.conμgNm3 = kbn.formatBuilders.fixedUnit('μg/Nm³');
    +kbn.valueFormats.conmgm3 = kbn.formatBuilders.fixedUnit('mg/m³');
    +kbn.valueFormats.conmgNm3 = kbn.formatBuilders.fixedUnit('mg/Nm³');
    +kbn.valueFormats.congm3 = kbn.formatBuilders.fixedUnit('g/m³');
    +kbn.valueFormats.congNm3 = kbn.formatBuilders.fixedUnit('g/Nm³');
     
     // Time
     kbn.valueFormats.hertz = kbn.formatBuilders.decimalSIPrefix('Hz');
    @@ -1119,13 +1119,13 @@ kbn.getUnitFormats = function() {
             { text: 'parts-per-million (ppm)', value: 'ppm' },
             { text: 'parts-per-billion (ppb)', value: 'conppb' },
             { text: 'nanogram per cubic metre (ng/m3)', value: 'conngm3' },
    -        { text: 'nanogram per normal cubic metre (ng/Nm3)', value: 'conngNm3' },
    -        { text: 'microgram per cubic metre (μg/m3)', value: 'conμgm3' },
    -        { text: 'microgram per normal cubic metre (μg/Nm3)', value: 'conμgNm3' },
    -        { text: 'milligram per cubic metre (mg/m3)', value: 'conmgm3' },
    -        { text: 'milligram per normal cubic metre (mg/Nm3)', value: 'conmgNm3' },
    -        { text: 'gram per cubic metre (g/m3)', value: 'congm3' },
    -        { text: 'gram per normal cubic metre (g/Nm3)', value: 'congNm3' },
    +        { text: 'nanogram per normal cubic metre (ng/Nm³)', value: 'conngNm3' },
    +        { text: 'microgram per cubic metre (μg/m³)', value: 'conμgm3' },
    +        { text: 'microgram per normal cubic metre (μg/Nm³)', value: 'conμgNm3' },
    +        { text: 'milligram per cubic metre (mg/m³)', value: 'conmgm3' },
    +        { text: 'milligram per normal cubic metre (mg/Nm³)', value: 'conmgNm3' },
    +        { text: 'gram per cubic metre (g/m³)', value: 'congm3' },
    +        { text: 'gram per normal cubic metre (g/Nm³)', value: 'congNm3' },
           ],
         },
       ];
    
    From 26f709e87ea5d551b46f3b15909165aee732e298 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Fri, 27 Jul 2018 16:45:03 +0200
Subject: [PATCH 235/263] Karma to Jest
    
    ---
     ...map_ctrl_specs.ts => heatmap_ctrl.jest.ts} | 44 ++++++++++---------
     1 file changed, 24 insertions(+), 20 deletions(-)
     rename public/app/plugins/panel/heatmap/specs/{heatmap_ctrl_specs.ts => heatmap_ctrl.jest.ts} (61%)
    
    diff --git a/public/app/plugins/panel/heatmap/specs/heatmap_ctrl_specs.ts b/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    similarity index 61%
    rename from public/app/plugins/panel/heatmap/specs/heatmap_ctrl_specs.ts
    rename to public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    index 98055ccf52dd..70449763856c 100644
    --- a/public/app/plugins/panel/heatmap/specs/heatmap_ctrl_specs.ts
    +++ b/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    @@ -1,26 +1,30 @@
    -import { describe, beforeEach, it, expect, angularMocks } from '../../../../../test/lib/common';
    -
     import moment from 'moment';
     import { HeatmapCtrl } from '../heatmap_ctrl';
    -import helpers from '../../../../../test/specs/helpers';
     
     describe('HeatmapCtrl', function() {
    -  var ctx = new helpers.ControllerTestContext();
    +  let ctx = {};
     
    -  beforeEach(angularMocks.module('grafana.services'));
    -  beforeEach(angularMocks.module('grafana.controllers'));
    -  beforeEach(
    -    angularMocks.module(function($compileProvider) {
    -      $compileProvider.preAssignBindingsEnabled(true);
    -    })
    -  );
    +  let $injector = {
    +      get: () => {}
    +  };
     
    -  beforeEach(ctx.providePhase());
    -  beforeEach(ctx.createPanelController(HeatmapCtrl));
    -  beforeEach(() => {
    -    ctx.ctrl.annotationsPromise = Promise.resolve({});
    -    ctx.ctrl.updateTimeRange();
    -  });
    +  let $scope = {
    +    $on: () => {},
    +    events: {
    +        on: () => {}
    +    }
    +  };
    +
    +HeatmapCtrl.prototype.panel = {
    +    events: {
    +        on: () => {},
    +        emit: () => {}
    +    }
    +};
    +
    +    beforeEach(() => {
    +        ctx.ctrl = new HeatmapCtrl($scope, $injector, {});
    +    });
     
       describe('when time series are outside range', function() {
         beforeEach(function() {
    @@ -36,7 +40,7 @@ describe('HeatmapCtrl', function() {
         });
     
         it('should set datapointsOutside', function() {
    -      expect(ctx.ctrl.dataWarning.title).to.be('Data points outside time range');
    +      expect(ctx.ctrl.dataWarning.title).toBe('Data points outside time range');
         });
       });
     
    @@ -61,7 +65,7 @@ describe('HeatmapCtrl', function() {
         });
     
         it('should set datapointsOutside', function() {
    -      expect(ctx.ctrl.dataWarning).to.be(null);
    +      expect(ctx.ctrl.dataWarning).toBe(null);
         });
       });
     
    @@ -72,7 +76,7 @@ describe('HeatmapCtrl', function() {
         });
     
         it('should set datapointsCount warning', function() {
    -      expect(ctx.ctrl.dataWarning.title).to.be('No data points');
    +      expect(ctx.ctrl.dataWarning.title).toBe('No data points');
         });
       });
     });
    
    From 805dc3542f780c57f477c61cf9cf475515aa3760 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Fri, 27 Jul 2018 16:46:41 +0200
    Subject: [PATCH 236/263] Remove extra mock
    
    ---
     .../panel/heatmap/specs/heatmap_ctrl.jest.ts  | 21 ++++++++-----------
     1 file changed, 9 insertions(+), 12 deletions(-)
    
    diff --git a/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts b/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    index 70449763856c..800c2518f9a9 100644
    --- a/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    +++ b/public/app/plugins/panel/heatmap/specs/heatmap_ctrl.jest.ts
    @@ -5,26 +5,23 @@ describe('HeatmapCtrl', function() {
       let ctx = {};
     
       let $injector = {
    -      get: () => {}
    +    get: () => {},
       };
     
       let $scope = {
         $on: () => {},
    -    events: {
    -        on: () => {}
    -    }
       };
     
    -HeatmapCtrl.prototype.panel = {
    +  HeatmapCtrl.prototype.panel = {
         events: {
    -        on: () => {},
    -        emit: () => {}
    -    }
    -};
    +      on: () => {},
    +      emit: () => {},
    +    },
    +  };
     
    -    beforeEach(() => {
    -        ctx.ctrl = new HeatmapCtrl($scope, $injector, {});
    -    });
    +  beforeEach(() => {
    +    ctx.ctrl = new HeatmapCtrl($scope, $injector, {});
    +  });
     
       describe('when time series are outside range', function() {
         beforeEach(function() {
    
    From bc9b6ddefe9c982b778d699c7c445db081982fbd Mon Sep 17 00:00:00 2001
    From: Sven Klemm 
    Date: Fri, 27 Jul 2018 17:14:27 +0200
    Subject: [PATCH 237/263] document metric column prefix for mysql and mssql
    
    ---
     docs/sources/features/datasources/mssql.md | 2 +-
     docs/sources/features/datasources/mysql.md | 2 +-
     2 files changed, 2 insertions(+), 2 deletions(-)
    
    diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md
    index d4d5cc6d73ec..bcb965dda74d 100644
    --- a/docs/sources/features/datasources/mssql.md
    +++ b/docs/sources/features/datasources/mssql.md
    @@ -148,7 +148,7 @@ The resulting table panel:
     
     ## Time series queries
     
    -If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, tha name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric.
    +If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, tha name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name.
     
     **Example database table:**
     
    diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md
    index ce50053c7eab..c6e620eb08b2 100644
    --- a/docs/sources/features/datasources/mysql.md
    +++ b/docs/sources/features/datasources/mysql.md
    @@ -103,7 +103,7 @@ The resulting table panel:
     
     If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
     Any column except `time` and `metric` is treated as a value column.
    -You may return a column named `metric` that is used as metric name for the value column.
    +You may return a column named `metric` that is used as metric name for the value column. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name.
     
     **Example with `metric` column:**
     
    
    From 036647ae35b9e6799d5af9b984a47a5907c40d6a Mon Sep 17 00:00:00 2001
    From: Sven Klemm 
    Date: Fri, 27 Jul 2018 17:18:45 +0200
    Subject: [PATCH 238/263] document metric column prefix in query editor
    
    ---
     .../app/plugins/datasource/mssql/partials/query.editor.html | 6 ++++--
     .../app/plugins/datasource/mysql/partials/query.editor.html | 5 ++++-
     2 files changed, 8 insertions(+), 3 deletions(-)
    
    diff --git a/public/app/plugins/datasource/mssql/partials/query.editor.html b/public/app/plugins/datasource/mssql/partials/query.editor.html
    index ddc24475d607..397a35164c08 100644
    --- a/public/app/plugins/datasource/mssql/partials/query.editor.html
    +++ b/public/app/plugins/datasource/mssql/partials/query.editor.html
    @@ -39,9 +39,11 @@
     	
    Time series:
     - return column named time (in UTC), as a unix time stamp or any sql native date data type. You can use the macros below.
    -- optional: return column named metric to represent the series names.
     - any other columns returned will be the time point values.
    -- if multiple value columns are present and a metric column is provided. the series name will be the combination of "MetricName - ValueColumnName".
    +Optional:
    +  - return column named metric to represent the series name.
    +  - If multiple value columns are returned the metric column is used as prefix.
    +  - If no column named metric is found the column name of the value column is used as series name
     
     Table:
     - return any set of columns
    diff --git a/public/app/plugins/datasource/mysql/partials/query.editor.html b/public/app/plugins/datasource/mysql/partials/query.editor.html
    index df68982fcfa1..d4be22fc3e90 100644
    --- a/public/app/plugins/datasource/mysql/partials/query.editor.html
    +++ b/public/app/plugins/datasource/mysql/partials/query.editor.html
    @@ -40,7 +40,10 @@
     		
    Time series:
     - return column named time or time_sec (in UTC), as a unix time stamp or any sql native date data type. You can use the macros below.
     - return column(s) with numeric datatype as values
    -- (Optional: return column named metric to represent the series name. If no column named metric is found the column name of the value column is used as series name)
    +Optional:
    +  - return column named metric to represent the series name.
    +  - If multiple value columns are returned the metric column is used as prefix.
    +  - If no column named metric is found the column name of the value column is used as series name
     
     Table:
     - return any set of columns
    
    From e487fabcd56f5a04b8fa5a6cba6a020855f2d062 Mon Sep 17 00:00:00 2001
    From: Sven Klemm 
    Date: Fri, 27 Jul 2018 17:54:51 +0200
    Subject: [PATCH 239/263] add metric column prefix test for mysql
    
    ---
     pkg/tsdb/mysql/mysql_test.go | 25 +++++++++++++++++++++++++
     1 file changed, 25 insertions(+)
    
    diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go
    index 3b4e283b726b..9947c23498bd 100644
    --- a/pkg/tsdb/mysql/mysql_test.go
    +++ b/pkg/tsdb/mysql/mysql_test.go
    @@ -634,6 +634,31 @@ func TestMySQL(t *testing.T) {
     				So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
     			})
     
    +			Convey("When doing a metric query with metric column and multiple value columns", func() {
    +				query := &tsdb.TsdbQuery{
    +					Queries: []*tsdb.Query{
    +						{
    +							Model: simplejson.NewFromAny(map[string]interface{}{
    +								"rawSql": `SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values ORDER BY 1,2`,
    +								"format": "time_series",
    +							}),
    +							RefId: "A",
    +						},
    +					},
    +				}
    +
    +				resp, err := endpoint.Query(nil, nil, query)
    +				So(err, ShouldBeNil)
    +				queryResult := resp.Results["A"]
    +				So(queryResult.Error, ShouldBeNil)
    +
    +				So(len(queryResult.Series), ShouldEqual, 4)
    +				So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
    +				So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
    +				So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
    +				So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
    +			})
    +
     			Convey("When doing a metric query grouping by time should return correct series", func() {
     				query := &tsdb.TsdbQuery{
     					Queries: []*tsdb.Query{
    
    From 3aa4790979cf457a26754afd67f5235fc3345f62 Mon Sep 17 00:00:00 2001
    From: Sven Klemm 
    Date: Fri, 27 Jul 2018 18:13:19 +0200
    Subject: [PATCH 240/263] add tests for metric column prefix to mssql
    
    ---
     pkg/tsdb/mssql/mssql_test.go | 25 +++++++++++++++++++++++++
     1 file changed, 25 insertions(+)
    
    diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go
    index 86484cb9d5e4..8e3d617ca09b 100644
    --- a/pkg/tsdb/mssql/mssql_test.go
    +++ b/pkg/tsdb/mssql/mssql_test.go
    @@ -610,6 +610,31 @@ func TestMSSQL(t *testing.T) {
     				So(queryResult.Series[1].Name, ShouldEqual, "valueTwo")
     			})
     
    +			Convey("When doing a metric query with metric column and multiple value columns", func() {
    +				query := &tsdb.TsdbQuery{
    +					Queries: []*tsdb.Query{
    +						{
    +							Model: simplejson.NewFromAny(map[string]interface{}{
    +								"rawSql": "SELECT $__timeEpoch(time), measurement AS metric, valueOne, valueTwo FROM metric_values ORDER BY 1",
    +								"format": "time_series",
    +							}),
    +							RefId: "A",
    +						},
    +					},
    +				}
    +
    +				resp, err := endpoint.Query(nil, nil, query)
    +				So(err, ShouldBeNil)
    +				queryResult := resp.Results["A"]
    +				So(queryResult.Error, ShouldBeNil)
    +
    +				So(len(queryResult.Series), ShouldEqual, 4)
    +				So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
    +				So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
    +				So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
    +				So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
    +			})
    +
     			Convey("Given a stored procedure that takes @from and @to in epoch time", func() {
     				sql := `
     						IF object_id('sp_test_epoch') IS NOT NULL
    
    From e37e8cb38c649796db57a39868d4c3c79ddab9fd Mon Sep 17 00:00:00 2001
    From: Jan Garaj 
    Date: Mon, 30 Jul 2018 08:02:16 +0100
    Subject: [PATCH 241/263] Add missing tls_skip_verify_insecure (#12748)
    
    ---
     conf/defaults.ini | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/conf/defaults.ini b/conf/defaults.ini
    index 5faba3ea7bd4..6c27886c649f 100644
    --- a/conf/defaults.ini
    +++ b/conf/defaults.ini
    @@ -311,6 +311,7 @@ token_url =
     api_url =
     team_ids =
     allowed_organizations =
    +tls_skip_verify_insecure = false
     
     #################################### Basic Auth ##########################
     [auth.basic]
    
    From 13a7b638bcc90ff6abcf00a388d8dfbedf01a8b2 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 10:19:51 +0200
    Subject: [PATCH 242/263] changelog: add notes about closing #12747
    
    [skip ci]
    ---
     CHANGELOG.md | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index ad1b63234e9f..4a2c3c7a0af9 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -24,6 +24,7 @@
     * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
    +* **Auth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
     
     # 5.2.2 (2018-07-25)
     
    
    From e4983cba2fc17de8523814b7126e5c2d858ac569 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 10:21:22 +0200
    Subject: [PATCH 243/263] changelog: update
    
    [skip ci]
    ---
     CHANGELOG.md | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index 4a2c3c7a0af9..b8f5bced9727 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -24,7 +24,7 @@
     * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
    -* **Auth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
    +* **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
     
     # 5.2.2 (2018-07-25)
     
    
    From 3d4a346c6621c6e685d338dc95aed0221c84c541 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Mon, 30 Jul 2018 13:02:08 +0200
    Subject: [PATCH 244/263] Begin conversion
    
    ---
     .../prometheus/specs/_datasource.jest.ts      | 792 ++++++++++++++++++
     1 file changed, 792 insertions(+)
     create mode 100644 public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    
    diff --git a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    new file mode 100644
    index 000000000000..384abc8f9027
    --- /dev/null
    +++ b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    @@ -0,0 +1,792 @@
    +import moment from 'moment';
    +import { PrometheusDatasource } from '../datasource';
    +import $q from 'q';
    +
    +const SECOND = 1000;
    +const MINUTE = 60 * SECOND;
    +const HOUR = 60 * MINUTE;
    +
    +const time = ({ hours = 0, seconds = 0, minutes = 0 }) => moment(hours * HOUR + minutes * MINUTE + seconds * SECOND);
    +
    +let ctx = {};
    +let instanceSettings = {
    +  url: 'proxied',
    +  directUrl: 'direct',
    +  user: 'test',
    +  password: 'mupp',
    +  jsonData: { httpMethod: 'GET' },
    +};
    +let backendSrv = {
    +  datasourceRequest: jest.fn(),
    +};
    +
    +let templateSrv = {
    +  replace: (target, scopedVars, format) => {
    +    if (!target) {
    +      return target;
    +    }
    +    let variable, value, fmt;
    +
    +    return target.replace(scopedVars, (match, var1, var2, fmt2, var3, fmt3) => {
    +      variable = this.index[var1 || var2 || var3];
    +      fmt = fmt2 || fmt3 || format;
    +      if (scopedVars) {
    +        value = scopedVars[var1 || var2 || var3];
    +        if (value) {
    +          return this.formatValue(value.value, fmt, variable);
    +        }
    +      }
    +    });
    +  },
    +};
    +
    +let timeSrv = {
    +  timeRange: () => {
    +    return { to: { diff: () => 2000 }, from: '' };
    +  },
    +};
    +
    +describe('PrometheusDatasource', function() {
    +  //   beforeEach(angularMocks.module('grafana.core'));
    +  //   beforeEach(angularMocks.module('grafana.services'));
    +  //   beforeEach(ctx.providePhase(['timeSrv']));
    +
    +  //   beforeEach(
    +  //     angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    +  //       ctx.$q = $q;
    +  //       ctx.$httpBackend = $httpBackend;
    +  //       ctx.$rootScope = $rootScope;
    +  //       ctx.ds = $injector.instantiate(PrometheusDatasource, {
    +  //         instanceSettings: instanceSettings,
    +  //       });
    +  //       $httpBackend.when('GET', /\.html$/).respond('');
    +  //     })
    +  //   );
    +
    +  beforeEach(() => {
    +    ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +  });
    +  describe('When querying prometheus with one target using query editor target spec', function() {
    +    var results;
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +    // Interval alignment with step
    +    var urlExpected =
    +      'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
    +    var response = {
    +      data: {
    +        status: 'success',
    +        data: {
    +          resultType: 'matrix',
    +          result: [
    +            {
    +              metric: { __name__: 'test', job: 'testjob' },
    +              values: [[60, '3846']],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +    beforeEach(async () => {
    +      //   ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +      //   ctx.$httpBackend.flush();
    +    });
    +    it('should generate the correct query', function() {
    +      //   ctx.$httpBackend.verifyNoOutstandingExpectation();
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should return series list', function() {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +  describe('When querying prometheus with one target which return multiple series', function() {
    +    var results;
    +    var start = 60;
    +    var end = 360;
    +    var step = 60;
    +    // var urlExpected =
    +    //   'proxied/api/v1/query_range?query=' +
    +    //   encodeURIComponent('test{job="testjob"}') +
    +    //   '&start=' +
    +    //   start +
    +    //   '&end=' +
    +    //   end +
    +    //   '&step=' +
    +    //   step;
    +    var query = {
    +      range: { from: time({ seconds: start }), to: time({ seconds: end }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [
    +            {
    +              metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    +              values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    +            },
    +            {
    +              metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    +              values: [[start + step * 2, '4846']],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +    beforeEach(async () => {
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should be same length', function() {
    +      expect(results.data.length).toBe(2);
    +      expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
    +      expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
    +    });
    +    it('should fill null until first datapoint in response', function() {
    +      expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
    +      expect(results.data[0].datapoints[0][0]).toBe(null);
    +      expect(results.data[0].datapoints[1][1]).toBe((start + step * 1) * 1000);
    +      expect(results.data[0].datapoints[1][0]).toBe(3846);
    +    });
    +    it('should fill null after last datapoint in response', function() {
    +      var length = (end - start) / step + 1;
    +      expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000);
    +      expect(results.data[0].datapoints[length - 2][0]).toBe(3848);
    +      expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000);
    +      expect(results.data[0].datapoints[length - 1][0]).toBe(null);
    +    });
    +    it('should fill null at gap between series', function() {
    +      expect(results.data[0].datapoints[2][1]).toBe((start + step * 2) * 1000);
    +      expect(results.data[0].datapoints[2][0]).toBe(null);
    +      expect(results.data[1].datapoints[1][1]).toBe((start + step * 1) * 1000);
    +      expect(results.data[1].datapoints[1][0]).toBe(null);
    +      expect(results.data[1].datapoints[3][1]).toBe((start + step * 3) * 1000);
    +      expect(results.data[1].datapoints[3][0]).toBe(null);
    +    });
    +  });
    +  describe('When querying prometheus with one target and instant = true', function() {
    +    var results;
    +    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    +      interval: '60s',
    +    };
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'vector',
    +          result: [
    +            {
    +              metric: { __name__: 'test', job: 'testjob' },
    +              value: [123, '3846'],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +    beforeEach(async () => {
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should generate the correct query', function() {
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should return series list', function() {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +  describe('When performing annotationQuery', function() {
    +    var results;
    +    // var urlExpected =
    +    //   'proxied/api/v1/query_range?query=' +
    +    //   encodeURIComponent('ALERTS{alertstate="firing"}') +
    +    //   '&start=60&end=180&step=60';
    +    var options = {
    +      annotation: {
    +        expr: 'ALERTS{alertstate="firing"}',
    +        tagKeys: 'job',
    +        titleFormat: '{{alertname}}',
    +        textFormat: '{{instance}}',
    +      },
    +      range: {
    +        from: time({ seconds: 63 }),
    +        to: time({ seconds: 123 }),
    +      },
    +    };
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [
    +            {
    +              metric: {
    +                __name__: 'ALERTS',
    +                alertname: 'InstanceDown',
    +                alertstate: 'firing',
    +                instance: 'testinstance',
    +                job: 'testjob',
    +              },
    +              values: [[123, '1']],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +    beforeEach(async () => {
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.annotationQuery(options).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should return annotation list', function() {
    +      //   ctx.$rootScope.$apply();
    +      expect(results.length).toBe(1);
    +      expect(results[0].tags).toContain('testjob');
    +      expect(results[0].title).toBe('InstanceDown');
    +      expect(results[0].text).toBe('testinstance');
    +      expect(results[0].time).toBe(123 * 1000);
    +    });
    +  });
    +
    +  describe('When resultFormat is table and instant = true', function() {
    +    var results;
    +    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    +      interval: '60s',
    +    };
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'vector',
    +          result: [
    +            {
    +              metric: { __name__: 'test', job: 'testjob' },
    +              value: [123, '3846'],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +
    +    beforeEach(async () => {
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +
    +    it('should return result', () => {
    +      expect(results).not.toBe(null);
    +    });
    +  });
    +
    +  describe('The "step" query parameter', function() {
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [],
    +        },
    +      },
    +    };
    +
    +    it('should be min interval when greater than auto interval', async () => {
    +      let query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '10s',
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      let urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +
    +    it('step should never go below 1', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [{ expr: 'test' }],
    +        interval: '100ms',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +
    +    it('should be auto interval when greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '5s',
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should result in querying fewer than 11000 data points', async () => {
    +      var query = {
    +        // 6 hour range
    +        range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
    +        targets: [{ expr: 'test' }],
    +        interval: '1s',
    +      };
    +      var end = 7 * 60 * 60;
    +      var start = 60 * 60;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should not apply min interval when interval * intervalFactor greater', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '10s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      // times get rounded up to interval
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should apply min interval when interval * intervalFactor smaller', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '15s',
    +            intervalFactor: 2,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should apply intervalFactor to auto interval when greater', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '5s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      // times get aligned to interval
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should not not be affected by the 11000 data points limit when large enough', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should be determined by the 11000 data points limit when too small', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +  });
    +
    +  describe('The __interval and __interval_ms template variables', function() {
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [],
    +        },
    +      },
    +    };
    +
    +    it('should be unchanged when auto interval is greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '5s',
    +          },
    +        ],
    +        interval: '10s',
    +        scopedVars: {
    +          __interval: { text: '10s', value: '10s' },
    +          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('10s');
    +      expect(query.scopedVars.__interval.value).toBe('10s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
    +    });
    +    it('should be min interval when it is greater than auto interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '10s',
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('5s');
    +      expect(query.scopedVars.__interval.value).toBe('5s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +    });
    +    it('should account for intervalFactor', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '5s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +        scopedVars: {
    +          __interval: { text: '10s', value: '10s' },
    +          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[100s])') + '&start=0&end=500&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('10s');
    +      expect(query.scopedVars.__interval.value).toBe('10s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
    +    });
    +    it('should be interval * intervalFactor when greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '10s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[50s])') + '&start=50&end=450&step=50';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('5s');
    +      expect(query.scopedVars.__interval.value).toBe('5s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +    });
    +    it('should be min interval when greater than interval * intervalFactor', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '15s',
    +            intervalFactor: 2,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[15s])') + '&start=60&end=420&step=15';
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('5s');
    +      expect(query.scopedVars.__interval.value).toBe('5s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +    });
    +    it('should be determined by the 11000 data points limit, accounting for intervalFactor', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[60s])') +
    +        '&start=' +
    +        start +
    +        '&end=' +
    +        end +
    +        '&step=60';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(query.scopedVars.__interval.text).toBe('5s');
    +      expect(query.scopedVars.__interval.value).toBe('5s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +    });
    +  });
    +});
    +
    +describe('PrometheusDatasource for POST', function() {
    +  //   var ctx = new helpers.ServiceTestContext();
    +  let instanceSettings = {
    +    url: 'proxied',
    +    directUrl: 'direct',
    +    user: 'test',
    +    password: 'mupp',
    +    jsonData: { httpMethod: 'POST' },
    +  };
    +
    +  //   beforeEach(angularMocks.module('grafana.core'));
    +  //   beforeEach(angularMocks.module('grafana.services'));
    +  //   beforeEach(ctx.providePhase(['timeSrv']));
    +
    +  //   beforeEach(
    +  //     // angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    +  //     //   ctx.$q = $q;
    +  //     //   ctx.$httpBackend = $httpBackend;
    +  //     //   ctx.$rootScope = $rootScope;
    +  //     //   ctx.ds = $injector.instantiate(PrometheusDatasource, { instanceSettings: instanceSettings });
    +  //     //   $httpBackend.when('GET', /\.html$/).respond('');
    +  //     // })
    +  //   );
    +
    +  describe('When querying prometheus with one target using query editor target spec', function() {
    +    var results;
    +    var urlExpected = 'proxied/api/v1/query_range';
    +    var dataExpected = {
    +      query: 'test{job="testjob"}',
    +      start: 1 * 60,
    +      end: 3 * 60,
    +      step: 60,
    +    };
    +    var query = {
    +      range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [
    +            {
    +              metric: { __name__: 'test', job: 'testjob' },
    +              values: [[2 * 60, '3846']],
    +            },
    +          ],
    +        },
    +      },
    +    };
    +    beforeEach(async () => {
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should generate the correct query', function() {
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('POST');
    +      expect(res.url).toBe(urlExpected);
    +      expect(res.data).toEqual(dataExpected);
    +    });
    +    it('should return series list', function() {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +});
    
    From e32cf75c2d3caca0d62e3296701d63c9135e2233 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 13:50:18 +0200
    Subject: [PATCH 245/263] fix usage of metric column types so that you don't
     need to specify metric alias
    
    ---
     pkg/tsdb/sql_engine.go | 5 +++++
     1 file changed, 5 insertions(+)
    
    diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go
    index 027f37fc2433..29428971c641 100644
    --- a/pkg/tsdb/sql_engine.go
    +++ b/pkg/tsdb/sql_engine.go
    @@ -75,6 +75,10 @@ var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, rowTransfo
     		queryEndpoint.timeColumnNames = config.TimeColumnNames
     	}
     
    +	if len(config.MetricColumnTypes) > 0 {
    +		queryEndpoint.metricColumnTypes = config.MetricColumnTypes
    +	}
    +
     	engineCache.Lock()
     	defer engineCache.Unlock()
     
    @@ -249,6 +253,7 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows,
     				columnType := columnTypes[i].DatabaseTypeName()
     
     				for _, mct := range e.metricColumnTypes {
    +					e.log.Info(mct)
     					if columnType == mct {
     						metricIndex = i
     						continue
    
    From 38a52c2489853eaff1ce036b864f736c59c9ba49 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 13:50:52 +0200
    Subject: [PATCH 246/263] mssql: update tests
    
    ---
     pkg/tsdb/mssql/mssql_test.go | 54 ++++++++++--------------------------
     1 file changed, 15 insertions(+), 39 deletions(-)
    
    diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go
    index 8e3d617ca09b..30d1da3bda19 100644
    --- a/pkg/tsdb/mssql/mssql_test.go
    +++ b/pkg/tsdb/mssql/mssql_test.go
    @@ -615,7 +615,7 @@ func TestMSSQL(t *testing.T) {
     					Queries: []*tsdb.Query{
     						{
     							Model: simplejson.NewFromAny(map[string]interface{}{
    -								"rawSql": "SELECT $__timeEpoch(time), measurement AS metric, valueOne, valueTwo FROM metric_values ORDER BY 1",
    +								"rawSql": "SELECT $__timeEpoch(time), measurement, valueOne, valueTwo FROM metric_values ORDER BY 1",
     								"format": "time_series",
     							}),
     							RefId: "A",
    @@ -660,21 +660,9 @@ func TestMSSQL(t *testing.T) {
     
     							SELECT
     								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
    -								measurement + ' - value one' as metric,
    -								avg(valueOne) as value
    -							FROM
    -								metric_values
    -							WHERE
    -								time BETWEEN DATEADD(s, @from, '1970-01-01') AND DATEADD(s, @to, '1970-01-01') AND
    -								(@metric = 'ALL' OR measurement = @metric)
    -							GROUP BY
    -								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval,
    -								measurement
    -							UNION ALL
    -							SELECT
    -								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
    -								measurement + ' - value two' as metric,
    -								avg(valueTwo) as value
    +								measurement as metric,
    +								avg(valueOne) as valueOne,
    +								avg(valueTwo) as valueTwo
     							FROM
     								metric_values
     							WHERE
    @@ -717,10 +705,10 @@ func TestMSSQL(t *testing.T) {
     					So(queryResult.Error, ShouldBeNil)
     
     					So(len(queryResult.Series), ShouldEqual, 4)
    -					So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one")
    -					So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
    -					So(queryResult.Series[2].Name, ShouldEqual, "Metric A - value two")
    -					So(queryResult.Series[3].Name, ShouldEqual, "Metric B - value two")
    +					So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
    +					So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
    +					So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
    +					So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
     				})
     			})
     
    @@ -749,21 +737,9 @@ func TestMSSQL(t *testing.T) {
     
     							SELECT
     								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
    -								measurement + ' - value one' as metric,
    -								avg(valueOne) as value
    -							FROM
    -								metric_values
    -							WHERE
    -								time BETWEEN @from AND @to AND
    -								(@metric = 'ALL' OR measurement = @metric)
    -							GROUP BY
    -								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval,
    -								measurement
    -							UNION ALL
    -							SELECT
    -								CAST(ROUND(DATEDIFF(second, '1970-01-01', time)/CAST(@dInterval as float), 0) as bigint)*@dInterval as time,
    -								measurement + ' - value two' as metric,
    -								avg(valueTwo) as value
    +								measurement as metric,
    +								avg(valueOne) as valueOne,
    +								avg(valueTwo) as valueTwo
     							FROM
     								metric_values
     							WHERE
    @@ -806,10 +782,10 @@ func TestMSSQL(t *testing.T) {
     					So(queryResult.Error, ShouldBeNil)
     
     					So(len(queryResult.Series), ShouldEqual, 4)
    -					So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one")
    -					So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
    -					So(queryResult.Series[2].Name, ShouldEqual, "Metric A - value two")
    -					So(queryResult.Series[3].Name, ShouldEqual, "Metric B - value two")
    +					So(queryResult.Series[0].Name, ShouldEqual, "Metric A valueOne")
    +					So(queryResult.Series[1].Name, ShouldEqual, "Metric A valueTwo")
    +					So(queryResult.Series[2].Name, ShouldEqual, "Metric B valueOne")
    +					So(queryResult.Series[3].Name, ShouldEqual, "Metric B valueTwo")
     				})
     			})
     		})
    
    From 917b6b11b0fbae37d80a5dd097de031327e98679 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 13:54:57 +0200
    Subject: [PATCH 247/263] devenv: update sql dashboards
    
    ---
     .../datasource_tests_mssql_unittest.json      | 73 ++++---------------
     .../datasource_tests_mysql_unittest.json      | 73 ++++---------------
     .../datasource_tests_postgres_unittest.json   | 73 ++++---------------
     3 files changed, 42 insertions(+), 177 deletions(-)
    
    diff --git a/devenv/dev-dashboards/datasource_tests_mssql_unittest.json b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json
    index d47cfb0ad6ea..0c7cc0fcc651 100644
    --- a/devenv/dev-dashboards/datasource_tests_mssql_unittest.json
    +++ b/devenv/dev-dashboards/datasource_tests_mssql_unittest.json
    @@ -64,7 +64,7 @@
       "editable": true,
       "gnetId": null,
       "graphTooltip": 0,
    -  "iteration": 1532618879985,
    +  "iteration": 1532949769359,
       "links": [],
       "panels": [
         {
    @@ -871,14 +871,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement + ' - value one' as metric, \n  avg(valueOne) as valueOne\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
    +          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement + ' - value two' as metric, \n  avg(valueTwo) as valueTwo \nFROM\n  metric_values\nWHERE\n  $__timeFilter(time) AND\n  ($metric = 'ALL' OR measurement = $metric)\nGROUP BY \n  $__timeGroup(time, '$summarize'), \n  measurement \nORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1067,14 +1061,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1245,14 +1233,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1423,14 +1405,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1773,14 +1749,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1954,14 +1924,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2135,14 +2099,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2316,14 +2274,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values\nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value two' as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND ($metric = 'ALL' OR measurement = $metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2460,7 +2412,10 @@
       "refresh": false,
       "schemaVersion": 16,
       "style": "dark",
    -  "tags": ["gdev", "mssql"],
    +  "tags": [
    +    "gdev",
    +    "mssql"
    +  ],
       "templating": {
         "list": [
           {
    @@ -2587,5 +2542,5 @@
       "timezone": "",
       "title": "Datasource tests - MSSQL (unit test)",
       "uid": "GlAqcPgmz",
    -  "version": 58
    +  "version": 3
     }
    \ No newline at end of file
    diff --git a/devenv/dev-dashboards/datasource_tests_mysql_unittest.json b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json
    index 326114ec8ff6..e95eedf254c0 100644
    --- a/devenv/dev-dashboards/datasource_tests_mysql_unittest.json
    +++ b/devenv/dev-dashboards/datasource_tests_mysql_unittest.json
    @@ -64,7 +64,7 @@
       "editable": true,
       "gnetId": null,
       "graphTooltip": 0,
    -  "iteration": 1532620354037,
    +  "iteration": 1532949531280,
       "links": [],
       "panels": [
         {
    @@ -871,14 +871,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  CONCAT(measurement, ' - value one') as metric, \n  avg(valueOne) as valueOne\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1, 2\nORDER BY 1",
    +          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  measurement as metric, \n  avg(valueOne) as valueOne,\n  avg(valueTwo) as valueTwo\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1, 2\nORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize') as time, \n  CONCAT(measurement, ' - value two') as metric, \n  avg(valueTwo) as valueTwo \nFROM\n  metric_values\nWHERE\n  $__timeFilter(time) AND\n  measurement IN($metric)\nGROUP BY 1,2\nORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1061,14 +1055,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1239,14 +1227,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1417,14 +1399,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__time(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1593,14 +1569,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1774,14 +1744,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1955,14 +1919,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2136,14 +2094,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values WHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), CONCAT(measurement, ' - value two') as metric, valueTwo FROM metric_values \nWHERE $__timeFilter(time) AND measurement IN($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2280,7 +2232,10 @@
       "refresh": false,
       "schemaVersion": 16,
       "style": "dark",
    -  "tags": ["gdev", "mysql"],
    +  "tags": [
    +    "gdev",
    +    "mysql"
    +  ],
       "templating": {
         "list": [
           {
    @@ -2405,5 +2360,5 @@
       "timezone": "",
       "title": "Datasource tests - MySQL (unittest)",
       "uid": "Hmf8FDkmz",
    -  "version": 12
    +  "version": 1
     }
    \ No newline at end of file
    diff --git a/devenv/dev-dashboards/datasource_tests_postgres_unittest.json b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json
    index 85151089b7f3..2243baed0aa8 100644
    --- a/devenv/dev-dashboards/datasource_tests_postgres_unittest.json
    +++ b/devenv/dev-dashboards/datasource_tests_postgres_unittest.json
    @@ -64,7 +64,7 @@
       "editable": true,
       "gnetId": null,
       "graphTooltip": 0,
    -  "iteration": 1532619575136,
    +  "iteration": 1532951521836,
       "links": [],
       "panels": [
         {
    @@ -871,14 +871,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  measurement || ' - value one' as metric, \n  avg(\"valueOne\") as \"valueOne\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
    +          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  measurement, \n  avg(\"valueOne\") as \"valueOne\",\n  avg(\"valueTwo\") as \"valueTwo\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT \n  $__timeGroup(time, '$summarize'), \n  measurement || ' - value two' as metric, \n  avg(\"valueTwo\") as \"valueTwo\"\nFROM\n  metric_values \nWHERE\n  $__timeFilter(time) AND\n  measurement in($metric)\nGROUP BY 1, 2\nORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1049,14 +1043,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1227,14 +1215,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1405,14 +1387,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1581,14 +1557,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1762,14 +1732,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -1943,14 +1907,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2124,14 +2082,8 @@
             {
               "alias": "",
               "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value one' as metric, \"valueOne\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    +          "rawSql": "SELECT $__timeEpoch(time), measurement, \"valueOne\", \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
               "refId": "A"
    -        },
    -        {
    -          "alias": "",
    -          "format": "time_series",
    -          "rawSql": "SELECT $__timeEpoch(time), measurement || ' - value two' as metric, \"valueTwo\" FROM metric_values \nWHERE $__timeFilter(time) AND measurement in($metric) ORDER BY 1",
    -          "refId": "B"
             }
           ],
           "thresholds": [],
    @@ -2268,7 +2220,10 @@
       "refresh": false,
       "schemaVersion": 16,
       "style": "dark",
    -  "tags": ["gdev", "postgres"],
    +  "tags": [
    +    "gdev",
    +    "postgres"
    +  ],
       "templating": {
         "list": [
           {
    @@ -2397,5 +2352,5 @@
       "timezone": "",
       "title": "Datasource tests - Postgres (unittest)",
       "uid": "vHQdlVziz",
    -  "version": 17
    +  "version": 1
     }
    \ No newline at end of file
    
    From 8a22129177a8f3656cd55b411245d516a16c4c87 Mon Sep 17 00:00:00 2001
    From: Sven Klemm 
    Date: Mon, 30 Jul 2018 14:37:23 +0200
    Subject: [PATCH 248/263] add version note to metric prefix and fix typo
    
    ---
     docs/sources/features/datasources/mssql.md    | 3 ++-
     docs/sources/features/datasources/mysql.md    | 3 ++-
     docs/sources/features/datasources/postgres.md | 3 ++-
     3 files changed, 6 insertions(+), 3 deletions(-)
    
    diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md
    index bcb965dda74d..ea7be8e1c30e 100644
    --- a/docs/sources/features/datasources/mssql.md
    +++ b/docs/sources/features/datasources/mssql.md
    @@ -148,7 +148,8 @@ The resulting table panel:
     
     ## Time series queries
     
    -If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, tha name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name.
    +If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, the name of the value column will be the metric name. You may select multiple value columns, each will have its name as metric.
    +If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+).
     
     **Example database table:**
     
    diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md
    index c6e620eb08b2..22287b2a8382 100644
    --- a/docs/sources/features/datasources/mysql.md
    +++ b/docs/sources/features/datasources/mysql.md
    @@ -103,7 +103,8 @@ The resulting table panel:
     
     If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
     Any column except `time` and `metric` is treated as a value column.
    -You may return a column named `metric` that is used as metric name for the value column. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name.
    +You may return a column named `metric` that is used as metric name for the value column.
    +If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+).
     
     **Example with `metric` column:**
     
    diff --git a/docs/sources/features/datasources/postgres.md b/docs/sources/features/datasources/postgres.md
    index f3e52ed6652a..793b3b6f4c07 100644
    --- a/docs/sources/features/datasources/postgres.md
    +++ b/docs/sources/features/datasources/postgres.md
    @@ -101,7 +101,8 @@ The resulting table panel:
     
     If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
     Any column except `time` and `metric` is treated as a value column.
    -You may return a column named `metric` that is used as metric name for the value column. If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name.
    +You may return a column named `metric` that is used as metric name for the value column.
    +If you return multiple value columns and a column named `metric` then this column is used as prefix for the series name (only available in Grafana 5.3+).
     
     **Example with `metric` column:**
     
    
    From 9c0fbe5a0b3c2e334cff6d6bbe2cb4d5ae48a5fd Mon Sep 17 00:00:00 2001
    From: Worty <6840978+Worty@users.noreply.github.com>
    Date: Mon, 30 Jul 2018 16:19:31 +0200
    Subject: [PATCH 249/263] fixed that missing one
    
    ---
     public/app/core/utils/kbn.ts | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts
    index 74ef2a9e8740..7bf2cdc5fd67 100644
    --- a/public/app/core/utils/kbn.ts
    +++ b/public/app/core/utils/kbn.ts
    @@ -1118,7 +1118,7 @@ kbn.getUnitFormats = function() {
           submenu: [
             { text: 'parts-per-million (ppm)', value: 'ppm' },
             { text: 'parts-per-billion (ppb)', value: 'conppb' },
    -        { text: 'nanogram per cubic metre (ng/m3)', value: 'conngm3' },
    +        { text: 'nanogram per cubic metre (ng/m³)', value: 'conngm3' },
             { text: 'nanogram per normal cubic metre (ng/Nm³)', value: 'conngNm3' },
             { text: 'microgram per cubic metre (μg/m³)', value: 'conμgm3' },
             { text: 'microgram per normal cubic metre (μg/Nm³)', value: 'conμgNm3' },
    
    From 4fa979649cf412c491a1d9d42d1d0062b13ff55d Mon Sep 17 00:00:00 2001
    From: Worty <6840978+Worty@users.noreply.github.com>
    Date: Mon, 30 Jul 2018 16:28:19 +0200
    Subject: [PATCH 250/263] also fixed "Watt per square metre"
    
    ---
     public/app/core/utils/kbn.ts | 4 ++--
     1 file changed, 2 insertions(+), 2 deletions(-)
    
    diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts
    index 7bf2cdc5fd67..c2764670b95c 100644
    --- a/public/app/core/utils/kbn.ts
    +++ b/public/app/core/utils/kbn.ts
    @@ -500,7 +500,7 @@ kbn.valueFormats.watt = kbn.formatBuilders.decimalSIPrefix('W');
     kbn.valueFormats.kwatt = kbn.formatBuilders.decimalSIPrefix('W', 1);
     kbn.valueFormats.mwatt = kbn.formatBuilders.decimalSIPrefix('W', -1);
     kbn.valueFormats.kwattm = kbn.formatBuilders.decimalSIPrefix('W/Min', 1);
    -kbn.valueFormats.Wm2 = kbn.formatBuilders.fixedUnit('W/m2');
    +kbn.valueFormats.Wm2 = kbn.formatBuilders.fixedUnit('W/m²');
     kbn.valueFormats.voltamp = kbn.formatBuilders.decimalSIPrefix('VA');
     kbn.valueFormats.kvoltamp = kbn.formatBuilders.decimalSIPrefix('VA', 1);
     kbn.valueFormats.voltampreact = kbn.formatBuilders.decimalSIPrefix('var');
    @@ -1021,7 +1021,7 @@ kbn.getUnitFormats = function() {
             { text: 'Watt (W)', value: 'watt' },
             { text: 'Kilowatt (kW)', value: 'kwatt' },
             { text: 'Milliwatt (mW)', value: 'mwatt' },
    -        { text: 'Watt per square metre (W/m2)', value: 'Wm2' },
    +        { text: 'Watt per square metre (W/m²)', value: 'Wm2' },
             { text: 'Volt-ampere (VA)', value: 'voltamp' },
             { text: 'Kilovolt-ampere (kVA)', value: 'kvoltamp' },
             { text: 'Volt-ampere reactive (var)', value: 'voltampreact' },
    
    From 88d8072be3cd17ee7461481f1c17c51e69ed36b3 Mon Sep 17 00:00:00 2001
    From: Jason Pereira 
    Date: Mon, 30 Jul 2018 15:51:15 +0100
    Subject: [PATCH 251/263] add aws_dx to cloudwatch datasource
    
    ---
     pkg/tsdb/cloudwatch/metric_find_query.go | 2 ++
     1 file changed, 2 insertions(+)
    
    diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go
    index 136ee241c2e5..d2bd135ecc9a 100644
    --- a/pkg/tsdb/cloudwatch/metric_find_query.go
    +++ b/pkg/tsdb/cloudwatch/metric_find_query.go
    @@ -46,6 +46,7 @@ func init() {
     		"AWS/CloudFront":     {"Requests", "BytesDownloaded", "BytesUploaded", "TotalErrorRate", "4xxErrorRate", "5xxErrorRate"},
     		"AWS/CloudSearch":    {"SuccessfulRequests", "SearchableDocuments", "IndexUtilization", "Partitions"},
     		"AWS/DMS":            {"FreeableMemory", "WriteIOPS", "ReadIOPS", "WriteThroughput", "ReadThroughput", "WriteLatency", "ReadLatency", "SwapUsage", "NetworkTransmitThroughput", "NetworkReceiveThroughput", "FullLoadThroughputBandwidthSource", "FullLoadThroughputBandwidthTarget", "FullLoadThroughputRowsSource", "FullLoadThroughputRowsTarget", "CDCIncomingChanges", "CDCChangesMemorySource", "CDCChangesMemoryTarget", "CDCChangesDiskSource", "CDCChangesDiskTarget", "CDCThroughputBandwidthTarget", "CDCThroughputRowsSource", "CDCThroughputRowsTarget", "CDCLatencySource", "CDCLatencyTarget"},
    +		"AWS/DX":             {"ConnectionState", "ConnectionBpsEgress", "ConnectionBpsIngress", "ConnectionPpsEgress", "ConnectionPpsIngress", "ConnectionCRCErrorCount", "ConnectionLightLevelTx", "ConnectionLightLevelRx"},
     		"AWS/DynamoDB":       {"ConditionalCheckFailedRequests", "ConsumedReadCapacityUnits", "ConsumedWriteCapacityUnits", "OnlineIndexConsumedWriteCapacity", "OnlineIndexPercentageProgress", "OnlineIndexThrottleEvents", "ProvisionedReadCapacityUnits", "ProvisionedWriteCapacityUnits", "ReadThrottleEvents", "ReturnedBytes", "ReturnedItemCount", "ReturnedRecordsCount", "SuccessfulRequestLatency", "SystemErrors", "TimeToLiveDeletedItemCount", "ThrottledRequests", "UserErrors", "WriteThrottleEvents"},
     		"AWS/EBS":            {"VolumeReadBytes", "VolumeWriteBytes", "VolumeReadOps", "VolumeWriteOps", "VolumeTotalReadTime", "VolumeTotalWriteTime", "VolumeIdleTime", "VolumeQueueLength", "VolumeThroughputPercentage", "VolumeConsumedReadWriteOps", "BurstBalance"},
     		"AWS/EC2":            {"CPUCreditUsage", "CPUCreditBalance", "CPUUtilization", "DiskReadOps", "DiskWriteOps", "DiskReadBytes", "DiskWriteBytes", "NetworkIn", "NetworkOut", "NetworkPacketsIn", "NetworkPacketsOut", "StatusCheckFailed", "StatusCheckFailed_Instance", "StatusCheckFailed_System"},
    @@ -118,6 +119,7 @@ func init() {
     		"AWS/CloudFront":       {"DistributionId", "Region"},
     		"AWS/CloudSearch":      {},
     		"AWS/DMS":              {"ReplicationInstanceIdentifier", "ReplicationTaskIdentifier"},
    +		"AWS/DX":               {"ConnectionId"},
     		"AWS/DynamoDB":         {"TableName", "GlobalSecondaryIndexName", "Operation", "StreamLabel"},
     		"AWS/EBS":              {"VolumeId"},
     		"AWS/EC2":              {"AutoScalingGroupName", "ImageId", "InstanceId", "InstanceType"},
    
    From 162d3e8036f8365e294502b6dcd496518c951a5b Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 17:03:01 +0200
    Subject: [PATCH 252/263] changelog: add notes about closing #12727
    
    [skip ci]
    ---
     CHANGELOG.md | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index b8f5bced9727..c2e8c5c788e7 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -16,6 +16,7 @@
     * **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597)
     * **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
     * **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
    +* **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm)
     * **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley)
     * **Postgres**: Escape ssl mode parameter in connectionstring [#12644](https://github.com/grafana/grafana/issues/12644), thx [@yogyrahmawan](https://github.com/yogyrahmawan)
     * **Github OAuth**: Allow changes of user info at Github to be synched to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber)
    
    From ad84a145f56f1fc1a8d513014c05ef40326f89a4 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Mon, 30 Jul 2018 17:03:24 +0200
    Subject: [PATCH 253/263] changelog: add notes about closing #12744
    
    [skip ci]
    ---
     CHANGELOG.md | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index c2e8c5c788e7..11baca97714d 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -26,6 +26,7 @@
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
     * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
    +* **Units**: Change units to include characters for power of 2 and 3 [#12744](https://github.com/grafana/grafana/pull/12744), thx [@Worty](https://github.com/Worty)
     
     # 5.2.2 (2018-07-25)
     
    
    From e4c2476f3c898879fa6be89c18e1ea325bf88c13 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Tue, 31 Jul 2018 09:35:08 +0200
    Subject: [PATCH 254/263] Weird execution order for the tests...
    
    ---
     .../datasource/prometheus/datasource.ts       |  7 +++++-
     .../prometheus/result_transformer.ts          |  7 +++++-
     .../prometheus/specs/_datasource.jest.ts      | 25 +++----------------
     3 files changed, 15 insertions(+), 24 deletions(-)
    
    diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts
    index 75a946d6f368..6801a9a1d592 100644
    --- a/public/app/plugins/datasource/prometheus/datasource.ts
    +++ b/public/app/plugins/datasource/prometheus/datasource.ts
    @@ -175,8 +175,12 @@ export class PrometheusDatasource {
               responseIndex: index,
               refId: activeTargets[index].refId,
             };
    -
    +        console.log('format: ' + transformerOptions.format);
    +        console.log('resultType: ' + response.data.data.resultType);
    +        console.log('legendFormat: ' + transformerOptions.legendFormat);
    +        // console.log(result);
             this.resultTransformer.transform(result, response, transformerOptions);
    +        // console.log(result);
           });
     
           return { data: result };
    @@ -233,6 +237,7 @@ export class PrometheusDatasource {
         if (start > end) {
           throw { message: 'Invalid time range' };
         }
    +    // console.log(query.expr);
     
         var url = '/api/v1/query_range';
         var data = {
    diff --git a/public/app/plugins/datasource/prometheus/result_transformer.ts b/public/app/plugins/datasource/prometheus/result_transformer.ts
    index b6d8a32af5f5..4b69cb98c547 100644
    --- a/public/app/plugins/datasource/prometheus/result_transformer.ts
    +++ b/public/app/plugins/datasource/prometheus/result_transformer.ts
    @@ -6,7 +6,9 @@ export class ResultTransformer {
     
       transform(result: any, response: any, options: any) {
         let prometheusResult = response.data.data.result;
    -
    +    console.log(prometheusResult);
    +    // console.log(options);
    +    // console.log(result);
         if (options.format === 'table') {
           result.push(this.transformMetricDataToTable(prometheusResult, options.responseListLength, options.refId));
         } else if (options.format === 'heatmap') {
    @@ -26,6 +28,7 @@ export class ResultTransformer {
             }
           }
         }
    +    // console.log(result);
       }
     
       transformMetricData(metricData, options, start, end) {
    @@ -137,6 +140,7 @@ export class ResultTransformer {
         if (!label || label === '{}') {
           label = options.query;
         }
    +    console.log(label);
         return label;
       }
     
    @@ -156,6 +160,7 @@ export class ResultTransformer {
         var labelPart = _.map(_.toPairs(labelData), function(label) {
           return label[0] + '="' + label[1] + '"';
         }).join(',');
    +    console.log(metricName);
         return metricName + '{' + labelPart + '}';
       }
     
    diff --git a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    index 384abc8f9027..34f78585d76f 100644
    --- a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    +++ b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    @@ -21,23 +21,7 @@ let backendSrv = {
     };
     
     let templateSrv = {
    -  replace: (target, scopedVars, format) => {
    -    if (!target) {
    -      return target;
    -    }
    -    let variable, value, fmt;
    -
    -    return target.replace(scopedVars, (match, var1, var2, fmt2, var3, fmt3) => {
    -      variable = this.index[var1 || var2 || var3];
    -      fmt = fmt2 || fmt3 || format;
    -      if (scopedVars) {
    -        value = scopedVars[var1 || var2 || var3];
    -        if (value) {
    -          return this.formatValue(value.value, fmt, variable);
    -        }
    -      }
    -    });
    -  },
    +  replace: jest.fn(str => str),
     };
     
     let timeSrv = {
    @@ -63,10 +47,7 @@ describe('PrometheusDatasource', function() {
       //     })
       //   );
     
    -  beforeEach(() => {
    -    ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -  });
    -  describe('When querying prometheus with one target using query editor target spec', function() {
    +  describe('When querying prometheus with one target using query editor target spec', async () => {
         var results;
         var query = {
           range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
    @@ -106,7 +87,7 @@ describe('PrometheusDatasource', function() {
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
         });
    -    it('should return series list', function() {
    +    it('should return series list', async () => {
           expect(results.data.length).toBe(1);
           expect(results.data[0].target).toBe('test{job="testjob"}');
         });
    
    From f1f0400769f01c99101914cb1ba62cca0e64ac94 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Tue, 31 Jul 2018 11:41:58 +0200
    Subject: [PATCH 255/263] changelog: add notes about closing #12300
    
    [skip ci]
    ---
     CHANGELOG.md | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index 11baca97714d..d3532ebe6402 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -23,6 +23,7 @@
     * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
     * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
     * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
    +* **Cloudwatch**: AWS/AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
     * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
    
    From 276a5e6eb5603df07d48aa66af4763bc9f3576c8 Mon Sep 17 00:00:00 2001
    From: =?UTF-8?q?Torkel=20=C3=96degaard?= 
    Date: Tue, 31 Jul 2018 17:29:02 +0200
    Subject: [PATCH 256/263] fix: test data api route used old name for test data
     datasource, fixes #12773
    
    ---
     pkg/api/metrics.go | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go
    index 00ad25ab8c2f..f2bc79df7ad6 100644
    --- a/pkg/api/metrics.go
    +++ b/pkg/api/metrics.go
    @@ -99,7 +99,7 @@ func GetTestDataRandomWalk(c *m.ReqContext) Response {
     	timeRange := tsdb.NewTimeRange(from, to)
     	request := &tsdb.TsdbQuery{TimeRange: timeRange}
     
    -	dsInfo := &m.DataSource{Type: "grafana-testdata-datasource"}
    +	dsInfo := &m.DataSource{Type: "testdata"}
     	request.Queries = append(request.Queries, &tsdb.Query{
     		RefId:      "A",
     		IntervalMs: intervalMs,
    
    From 89eae1566d036e153aea18eb62e983bc21bd315f Mon Sep 17 00:00:00 2001
    From: =?UTF-8?q?Torkel=20=C3=96degaard?= 
    Date: Tue, 31 Jul 2018 17:31:45 +0200
    Subject: [PATCH 257/263] fix: team email tooltip was not showing
    
    ---
     public/app/core/components/Forms/Forms.tsx | 2 +-
     1 file changed, 1 insertion(+), 1 deletion(-)
    
    diff --git a/public/app/core/components/Forms/Forms.tsx b/public/app/core/components/Forms/Forms.tsx
    index 4b74d48ba08b..543e1a1d6dfb 100644
    --- a/public/app/core/components/Forms/Forms.tsx
    +++ b/public/app/core/components/Forms/Forms.tsx
    @@ -12,7 +12,7 @@ export const Label: SFC = props => {
         
           {props.children}
           {props.tooltip && (
    -        
    +        
               
             
           )}
    
    From 6df3722a35faf455e2d25989a80a8e167531b5b7 Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Tue, 31 Jul 2018 18:01:36 +0200
    Subject: [PATCH 258/263] changelog: add notes about closing #12762
    
    [skip ci]
    ---
     CHANGELOG.md | 3 ++-
     1 file changed, 2 insertions(+), 1 deletion(-)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index d3532ebe6402..dde7ead6f136 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -23,7 +23,8 @@
     * **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane)
     * **Units**: Polish złoty currency [#12691](https://github.com/grafana/grafana/pull/12691), thx [@mwegrzynek](https://github.com/mwegrzynek)
     * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
    -* **Cloudwatch**: AWS/AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
    +* **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
     +* **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pull/12762), thx [@mindriot88](https://github.com/mindriot88)
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
     * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
    
    From d6158bc2935ec396f45114d736e684bb3a522c6b Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Wed, 1 Aug 2018 09:30:26 +0200
    Subject: [PATCH 259/263] All tests passing
    
    ---
     .../datasource/prometheus/datasource.ts       |   6 -
     .../prometheus/result_transformer.ts          |   7 +-
     .../prometheus/specs/_datasource.jest.ts      | 317 ++++----
     .../prometheus/specs/datasource_specs.ts      | 683 ------------------
     4 files changed, 188 insertions(+), 825 deletions(-)
     delete mode 100644 public/app/plugins/datasource/prometheus/specs/datasource_specs.ts
    
    diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts
    index 6801a9a1d592..ac8d774db591 100644
    --- a/public/app/plugins/datasource/prometheus/datasource.ts
    +++ b/public/app/plugins/datasource/prometheus/datasource.ts
    @@ -175,12 +175,7 @@ export class PrometheusDatasource {
               responseIndex: index,
               refId: activeTargets[index].refId,
             };
    -        console.log('format: ' + transformerOptions.format);
    -        console.log('resultType: ' + response.data.data.resultType);
    -        console.log('legendFormat: ' + transformerOptions.legendFormat);
    -        // console.log(result);
             this.resultTransformer.transform(result, response, transformerOptions);
    -        // console.log(result);
           });
     
           return { data: result };
    @@ -237,7 +232,6 @@ export class PrometheusDatasource {
         if (start > end) {
           throw { message: 'Invalid time range' };
         }
    -    // console.log(query.expr);
     
         var url = '/api/v1/query_range';
         var data = {
    diff --git a/public/app/plugins/datasource/prometheus/result_transformer.ts b/public/app/plugins/datasource/prometheus/result_transformer.ts
    index 4b69cb98c547..b6d8a32af5f5 100644
    --- a/public/app/plugins/datasource/prometheus/result_transformer.ts
    +++ b/public/app/plugins/datasource/prometheus/result_transformer.ts
    @@ -6,9 +6,7 @@ export class ResultTransformer {
     
       transform(result: any, response: any, options: any) {
         let prometheusResult = response.data.data.result;
    -    console.log(prometheusResult);
    -    // console.log(options);
    -    // console.log(result);
    +
         if (options.format === 'table') {
           result.push(this.transformMetricDataToTable(prometheusResult, options.responseListLength, options.refId));
         } else if (options.format === 'heatmap') {
    @@ -28,7 +26,6 @@ export class ResultTransformer {
             }
           }
         }
    -    // console.log(result);
       }
     
       transformMetricData(metricData, options, start, end) {
    @@ -140,7 +137,6 @@ export class ResultTransformer {
         if (!label || label === '{}') {
           label = options.query;
         }
    -    console.log(label);
         return label;
       }
     
    @@ -160,7 +156,6 @@ export class ResultTransformer {
         var labelPart = _.map(_.toPairs(labelData), function(label) {
           return label[0] + '="' + label[1] + '"';
         }).join(',');
    -    console.log(metricName);
         return metricName + '{' + labelPart + '}';
       }
     
    diff --git a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    index 34f78585d76f..2deab13a1010 100644
    --- a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    +++ b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    @@ -1,6 +1,7 @@
     import moment from 'moment';
     import { PrometheusDatasource } from '../datasource';
     import $q from 'q';
    +import { angularMocks } from 'test/lib/common';
     
     const SECOND = 1000;
     const MINUTE = 60 * SECOND;
    @@ -57,32 +58,31 @@ describe('PrometheusDatasource', function() {
         // Interval alignment with step
         var urlExpected =
           'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
    -    var response = {
    -      data: {
    -        status: 'success',
    +
    +    beforeEach(async () => {
    +      let response = {
             data: {
    -          resultType: 'matrix',
    -          result: [
    -            {
    -              metric: { __name__: 'test', job: 'testjob' },
    -              values: [[60, '3846']],
    -            },
    -          ],
    +          status: 'success',
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                values: [[60, '3846']],
    +              },
    +            ],
    +          },
             },
    -      },
    -    };
    -    beforeEach(async () => {
    -      //   ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    +      };
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
     
           await ctx.ds.query(query).then(function(data) {
             results = data;
           });
    -      //   ctx.$httpBackend.flush();
         });
    +
         it('should generate the correct query', function() {
    -      //   ctx.$httpBackend.verifyNoOutstandingExpectation();
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
    @@ -97,39 +97,33 @@ describe('PrometheusDatasource', function() {
         var start = 60;
         var end = 360;
         var step = 60;
    -    // var urlExpected =
    -    //   'proxied/api/v1/query_range?query=' +
    -    //   encodeURIComponent('test{job="testjob"}') +
    -    //   '&start=' +
    -    //   start +
    -    //   '&end=' +
    -    //   end +
    -    //   '&step=' +
    -    //   step;
    +
         var query = {
           range: { from: time({ seconds: start }), to: time({ seconds: end }) },
           targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
           interval: '60s',
         };
    -    var response = {
    -      status: 'success',
    -      data: {
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
             data: {
    -          resultType: 'matrix',
    -          result: [
    -            {
    -              metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    -              values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    -            },
    -            {
    -              metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    -              values: [[start + step * 2, '4846']],
    -            },
    -          ],
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    +                values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    +              },
    +              {
    +                metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    +                values: [[start + step * 2, '4846']],
    +              },
    +            ],
    +          },
             },
    -      },
    -    };
    -    beforeEach(async () => {
    +      };
    +
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
     
    @@ -137,11 +131,13 @@ describe('PrometheusDatasource', function() {
             results = data;
           });
         });
    +
         it('should be same length', function() {
           expect(results.data.length).toBe(2);
           expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
           expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
         });
    +
         it('should fill null until first datapoint in response', function() {
           expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
           expect(results.data[0].datapoints[0][0]).toBe(null);
    @@ -172,21 +168,23 @@ describe('PrometheusDatasource', function() {
           targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
           interval: '60s',
         };
    -    var response = {
    -      status: 'success',
    -      data: {
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
             data: {
    -          resultType: 'vector',
    -          result: [
    -            {
    -              metric: { __name__: 'test', job: 'testjob' },
    -              value: [123, '3846'],
    -            },
    -          ],
    +          data: {
    +            resultType: 'vector',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                value: [123, '3846'],
    +              },
    +            ],
    +          },
             },
    -      },
    -    };
    -    beforeEach(async () => {
    +      };
    +
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
     
    @@ -206,10 +204,7 @@ describe('PrometheusDatasource', function() {
       });
       describe('When performing annotationQuery', function() {
         var results;
    -    // var urlExpected =
    -    //   'proxied/api/v1/query_range?query=' +
    -    //   encodeURIComponent('ALERTS{alertstate="firing"}') +
    -    //   '&start=60&end=180&step=60';
    +
         var options = {
           annotation: {
             expr: 'ALERTS{alertstate="firing"}',
    @@ -222,27 +217,29 @@ describe('PrometheusDatasource', function() {
             to: time({ seconds: 123 }),
           },
         };
    -    var response = {
    -      status: 'success',
    -      data: {
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
             data: {
    -          resultType: 'matrix',
    -          result: [
    -            {
    -              metric: {
    -                __name__: 'ALERTS',
    -                alertname: 'InstanceDown',
    -                alertstate: 'firing',
    -                instance: 'testinstance',
    -                job: 'testjob',
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: {
    +                  __name__: 'ALERTS',
    +                  alertname: 'InstanceDown',
    +                  alertstate: 'firing',
    +                  instance: 'testinstance',
    +                  job: 'testjob',
    +                },
    +                values: [[123, '1']],
                   },
    -              values: [[123, '1']],
    -            },
    -          ],
    +            ],
    +          },
             },
    -      },
    -    };
    -    beforeEach(async () => {
    +      };
    +
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
     
    @@ -262,28 +259,29 @@ describe('PrometheusDatasource', function() {
     
       describe('When resultFormat is table and instant = true', function() {
         var results;
    -    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    +    // var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
         var query = {
           range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
           targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
           interval: '60s',
         };
    -    var response = {
    -      status: 'success',
    -      data: {
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
             data: {
    -          resultType: 'vector',
    -          result: [
    -            {
    -              metric: { __name__: 'test', job: 'testjob' },
    -              value: [123, '3846'],
    -            },
    -          ],
    +          data: {
    +            resultType: 'vector',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                value: [123, '3846'],
    +              },
    +            ],
    +          },
             },
    -      },
    -    };
    +      };
     
    -    beforeEach(async () => {
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query).then(function(data) {
    @@ -520,9 +518,13 @@ describe('PrometheusDatasource', function() {
               __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
             },
           };
    +
           var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=10';
     
    +      templateSrv.replace = jest.fn(str => str);
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query);
    @@ -530,10 +532,16 @@ describe('PrometheusDatasource', function() {
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    -      expect(query.scopedVars.__interval.text).toBe('10s');
    -      expect(query.scopedVars.__interval.value).toBe('10s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '10s',
    +          value: '10s',
    +        },
    +        __interval_ms: {
    +          text: 10000,
    +          value: 10000,
    +        },
    +      });
         });
         it('should be min interval when it is greater than auto interval', async () => {
           var query = {
    @@ -552,18 +560,27 @@ describe('PrometheusDatasource', function() {
             },
           };
           var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=10';
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query);
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    -      expect(query.scopedVars.__interval.text).toBe('5s');
    -      expect(query.scopedVars.__interval.value).toBe('5s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
         });
         it('should account for intervalFactor', async () => {
           var query = {
    @@ -583,14 +600,28 @@ describe('PrometheusDatasource', function() {
             },
           };
           var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[100s])') + '&start=0&end=500&step=100';
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=0&end=500&step=100';
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query);
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '10s',
    +          value: '10s',
    +        },
    +        __interval_ms: {
    +          text: 10000,
    +          value: 10000,
    +        },
    +      });
    +
           expect(query.scopedVars.__interval.text).toBe('10s');
           expect(query.scopedVars.__interval.value).toBe('10s');
           expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    @@ -614,7 +645,11 @@ describe('PrometheusDatasource', function() {
             },
           };
           var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[50s])') + '&start=50&end=450&step=50';
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=50&end=450&step=50';
    +
    +      templateSrv.replace = jest.fn(str => str);
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query);
    @@ -622,10 +657,16 @@ describe('PrometheusDatasource', function() {
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    -      expect(query.scopedVars.__interval.text).toBe('5s');
    -      expect(query.scopedVars.__interval.value).toBe('5s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
         });
         it('should be min interval when greater than interval * intervalFactor', async () => {
           var query = {
    @@ -645,7 +686,9 @@ describe('PrometheusDatasource', function() {
             },
           };
           var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[15s])') + '&start=60&end=420&step=15';
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=15';
     
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    @@ -654,10 +697,16 @@ describe('PrometheusDatasource', function() {
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    -      expect(query.scopedVars.__interval.text).toBe('5s');
    -      expect(query.scopedVars.__interval.value).toBe('5s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
         });
         it('should be determined by the 11000 data points limit, accounting for intervalFactor', async () => {
           var query = {
    @@ -679,23 +728,30 @@ describe('PrometheusDatasource', function() {
           var start = 0;
           var urlExpected =
             'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[60s])') +
    +        encodeURIComponent('rate(test[$__interval])') +
             '&start=' +
             start +
             '&end=' +
             end +
             '&step=60';
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query);
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
     
    -      expect(query.scopedVars.__interval.text).toBe('5s');
    -      expect(query.scopedVars.__interval.value).toBe('5s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(5 * 1000);
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
         });
       });
     });
    @@ -738,21 +794,22 @@ describe('PrometheusDatasource for POST', function() {
           targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
           interval: '60s',
         };
    -    var response = {
    -      status: 'success',
    -      data: {
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
             data: {
    -          resultType: 'matrix',
    -          result: [
    -            {
    -              metric: { __name__: 'test', job: 'testjob' },
    -              values: [[2 * 60, '3846']],
    -            },
    -          ],
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                values: [[2 * 60, '3846']],
    +              },
    +            ],
    +          },
             },
    -      },
    -    };
    -    beforeEach(async () => {
    +      };
           backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
           ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
           await ctx.ds.query(query).then(function(data) {
    diff --git a/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts b/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts
    deleted file mode 100644
    index c5da671b7576..000000000000
    --- a/public/app/plugins/datasource/prometheus/specs/datasource_specs.ts
    +++ /dev/null
    @@ -1,683 +0,0 @@
    -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
    -import moment from 'moment';
    -import $ from 'jquery';
    -import helpers from 'test/specs/helpers';
    -import { PrometheusDatasource } from '../datasource';
    -
    -const SECOND = 1000;
    -const MINUTE = 60 * SECOND;
    -const HOUR = 60 * MINUTE;
    -
    -const time = ({ hours = 0, seconds = 0, minutes = 0 }) => moment(hours * HOUR + minutes * MINUTE + seconds * SECOND);
    -
    -describe('PrometheusDatasource', function() {
    -  var ctx = new helpers.ServiceTestContext();
    -  var instanceSettings = {
    -    url: 'proxied',
    -    directUrl: 'direct',
    -    user: 'test',
    -    password: 'mupp',
    -    jsonData: { httpMethod: 'GET' },
    -  };
    -
    -  beforeEach(angularMocks.module('grafana.core'));
    -  beforeEach(angularMocks.module('grafana.services'));
    -  beforeEach(ctx.providePhase(['timeSrv']));
    -
    -  beforeEach(
    -    angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    -      ctx.$q = $q;
    -      ctx.$httpBackend = $httpBackend;
    -      ctx.$rootScope = $rootScope;
    -      ctx.ds = $injector.instantiate(PrometheusDatasource, {
    -        instanceSettings: instanceSettings,
    -      });
    -      $httpBackend.when('GET', /\.html$/).respond('');
    -    })
    -  );
    -  describe('When querying prometheus with one target using query editor target spec', function() {
    -    var results;
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -    // Interval alignment with step
    -    var urlExpected =
    -      'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [
    -          {
    -            metric: { __name__: 'test', job: 'testjob' },
    -            values: [[60, '3846']],
    -          },
    -        ],
    -      },
    -    };
    -    beforeEach(function() {
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -    it('should generate the correct query', function() {
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should return series list', function() {
    -      expect(results.data.length).to.be(1);
    -      expect(results.data[0].target).to.be('test{job="testjob"}');
    -    });
    -  });
    -  describe('When querying prometheus with one target which return multiple series', function() {
    -    var results;
    -    var start = 60;
    -    var end = 360;
    -    var step = 60;
    -    var urlExpected =
    -      'proxied/api/v1/query_range?query=' +
    -      encodeURIComponent('test{job="testjob"}') +
    -      '&start=' +
    -      start +
    -      '&end=' +
    -      end +
    -      '&step=' +
    -      step;
    -    var query = {
    -      range: { from: time({ seconds: start }), to: time({ seconds: end }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [
    -          {
    -            metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    -            values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    -          },
    -          {
    -            metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    -            values: [[start + step * 2, '4846']],
    -          },
    -        ],
    -      },
    -    };
    -    beforeEach(function() {
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -    it('should be same length', function() {
    -      expect(results.data.length).to.be(2);
    -      expect(results.data[0].datapoints.length).to.be((end - start) / step + 1);
    -      expect(results.data[1].datapoints.length).to.be((end - start) / step + 1);
    -    });
    -    it('should fill null until first datapoint in response', function() {
    -      expect(results.data[0].datapoints[0][1]).to.be(start * 1000);
    -      expect(results.data[0].datapoints[0][0]).to.be(null);
    -      expect(results.data[0].datapoints[1][1]).to.be((start + step * 1) * 1000);
    -      expect(results.data[0].datapoints[1][0]).to.be(3846);
    -    });
    -    it('should fill null after last datapoint in response', function() {
    -      var length = (end - start) / step + 1;
    -      expect(results.data[0].datapoints[length - 2][1]).to.be((end - step * 1) * 1000);
    -      expect(results.data[0].datapoints[length - 2][0]).to.be(3848);
    -      expect(results.data[0].datapoints[length - 1][1]).to.be(end * 1000);
    -      expect(results.data[0].datapoints[length - 1][0]).to.be(null);
    -    });
    -    it('should fill null at gap between series', function() {
    -      expect(results.data[0].datapoints[2][1]).to.be((start + step * 2) * 1000);
    -      expect(results.data[0].datapoints[2][0]).to.be(null);
    -      expect(results.data[1].datapoints[1][1]).to.be((start + step * 1) * 1000);
    -      expect(results.data[1].datapoints[1][0]).to.be(null);
    -      expect(results.data[1].datapoints[3][1]).to.be((start + step * 3) * 1000);
    -      expect(results.data[1].datapoints[3][0]).to.be(null);
    -    });
    -  });
    -  describe('When querying prometheus with one target and instant = true', function() {
    -    var results;
    -    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    -      interval: '60s',
    -    };
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'vector',
    -        result: [
    -          {
    -            metric: { __name__: 'test', job: 'testjob' },
    -            value: [123, '3846'],
    -          },
    -        ],
    -      },
    -    };
    -    beforeEach(function() {
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -    it('should generate the correct query', function() {
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should return series list', function() {
    -      expect(results.data.length).to.be(1);
    -      expect(results.data[0].target).to.be('test{job="testjob"}');
    -    });
    -  });
    -  describe('When performing annotationQuery', function() {
    -    var results;
    -    var urlExpected =
    -      'proxied/api/v1/query_range?query=' +
    -      encodeURIComponent('ALERTS{alertstate="firing"}') +
    -      '&start=60&end=180&step=60';
    -    var options = {
    -      annotation: {
    -        expr: 'ALERTS{alertstate="firing"}',
    -        tagKeys: 'job',
    -        titleFormat: '{{alertname}}',
    -        textFormat: '{{instance}}',
    -      },
    -      range: {
    -        from: time({ seconds: 63 }),
    -        to: time({ seconds: 123 }),
    -      },
    -    };
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [
    -          {
    -            metric: {
    -              __name__: 'ALERTS',
    -              alertname: 'InstanceDown',
    -              alertstate: 'firing',
    -              instance: 'testinstance',
    -              job: 'testjob',
    -            },
    -            values: [[123, '1']],
    -          },
    -        ],
    -      },
    -    };
    -    beforeEach(function() {
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.annotationQuery(options).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -    it('should return annotation list', function() {
    -      ctx.$rootScope.$apply();
    -      expect(results.length).to.be(1);
    -      expect(results[0].tags).to.contain('testjob');
    -      expect(results[0].title).to.be('InstanceDown');
    -      expect(results[0].text).to.be('testinstance');
    -      expect(results[0].time).to.be(123 * 1000);
    -    });
    -  });
    -
    -  describe('When resultFormat is table and instant = true', function() {
    -    var results;
    -    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    -      interval: '60s',
    -    };
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'vector',
    -        result: [
    -          {
    -            metric: { __name__: 'test', job: 'testjob' },
    -            value: [123, '3846'],
    -          },
    -        ],
    -      },
    -    };
    -
    -    beforeEach(function() {
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -
    -    it('should return result', () => {
    -      expect(results).not.to.be(null);
    -    });
    -  });
    -
    -  describe('The "step" query parameter', function() {
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [],
    -      },
    -    };
    -
    -    it('should be min interval when greater than auto interval', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '10s',
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -
    -    it('step should never go below 1', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [{ expr: 'test' }],
    -        interval: '100ms',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -
    -    it('should be auto interval when greater than min interval', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '5s',
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should result in querying fewer than 11000 data points', function() {
    -      var query = {
    -        // 6 hour range
    -        range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
    -        targets: [{ expr: 'test' }],
    -        interval: '1s',
    -      };
    -      var end = 7 * 60 * 60;
    -      var start = 60 * 60;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should not apply min interval when interval * intervalFactor greater', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '10s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      // times get rounded up to interval
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should apply min interval when interval * intervalFactor smaller', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '15s',
    -            intervalFactor: 2,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should apply intervalFactor to auto interval when greater', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '5s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      // times get aligned to interval
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should not not be affected by the 11000 data points limit when large enough', function() {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should be determined by the 11000 data points limit when too small', function() {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -  });
    -
    -  describe('The __interval and __interval_ms template variables', function() {
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [],
    -      },
    -    };
    -
    -    it('should be unchanged when auto interval is greater than min interval', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '5s',
    -          },
    -        ],
    -        interval: '10s',
    -        scopedVars: {
    -          __interval: { text: '10s', value: '10s' },
    -          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('10s');
    -      expect(query.scopedVars.__interval.value).to.be('10s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000);
    -    });
    -    it('should be min interval when it is greater than auto interval', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '10s',
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[10s])') + '&start=60&end=420&step=10';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('5s');
    -      expect(query.scopedVars.__interval.value).to.be('5s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
    -    });
    -    it('should account for intervalFactor', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '5s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -        scopedVars: {
    -          __interval: { text: '10s', value: '10s' },
    -          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[100s])') + '&start=0&end=500&step=100';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('10s');
    -      expect(query.scopedVars.__interval.value).to.be('10s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(10 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(10 * 1000);
    -    });
    -    it('should be interval * intervalFactor when greater than min interval', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '10s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[50s])') + '&start=50&end=450&step=50';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('5s');
    -      expect(query.scopedVars.__interval.value).to.be('5s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
    -    });
    -    it('should be min interval when greater than interval * intervalFactor', function() {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '15s',
    -            intervalFactor: 2,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' + encodeURIComponent('rate(test[15s])') + '&start=60&end=420&step=15';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('5s');
    -      expect(query.scopedVars.__interval.value).to.be('5s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
    -    });
    -    it('should be determined by the 11000 data points limit, accounting for intervalFactor', function() {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[60s])') +
    -        '&start=' +
    -        start +
    -        '&end=' +
    -        end +
    -        '&step=60';
    -      ctx.$httpBackend.expect('GET', urlExpected).respond(response);
    -      ctx.ds.query(query);
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -
    -      expect(query.scopedVars.__interval.text).to.be('5s');
    -      expect(query.scopedVars.__interval.value).to.be('5s');
    -      expect(query.scopedVars.__interval_ms.text).to.be(5 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).to.be(5 * 1000);
    -    });
    -  });
    -});
    -
    -describe('PrometheusDatasource for POST', function() {
    -  var ctx = new helpers.ServiceTestContext();
    -  var instanceSettings = {
    -    url: 'proxied',
    -    directUrl: 'direct',
    -    user: 'test',
    -    password: 'mupp',
    -    jsonData: { httpMethod: 'POST' },
    -  };
    -
    -  beforeEach(angularMocks.module('grafana.core'));
    -  beforeEach(angularMocks.module('grafana.services'));
    -  beforeEach(ctx.providePhase(['timeSrv']));
    -
    -  beforeEach(
    -    angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    -      ctx.$q = $q;
    -      ctx.$httpBackend = $httpBackend;
    -      ctx.$rootScope = $rootScope;
    -      ctx.ds = $injector.instantiate(PrometheusDatasource, { instanceSettings: instanceSettings });
    -      $httpBackend.when('GET', /\.html$/).respond('');
    -    })
    -  );
    -
    -  describe('When querying prometheus with one target using query editor target spec', function() {
    -    var results;
    -    var urlExpected = 'proxied/api/v1/query_range';
    -    var dataExpected = $.param({
    -      query: 'test{job="testjob"}',
    -      start: 1 * 60,
    -      end: 3 * 60,
    -      step: 60,
    -    });
    -    var query = {
    -      range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -    var response = {
    -      status: 'success',
    -      data: {
    -        resultType: 'matrix',
    -        result: [
    -          {
    -            metric: { __name__: 'test', job: 'testjob' },
    -            values: [[2 * 60, '3846']],
    -          },
    -        ],
    -      },
    -    };
    -    beforeEach(function() {
    -      ctx.$httpBackend.expectPOST(urlExpected, dataExpected).respond(response);
    -      ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -      ctx.$httpBackend.flush();
    -    });
    -    it('should generate the correct query', function() {
    -      ctx.$httpBackend.verifyNoOutstandingExpectation();
    -    });
    -    it('should return series list', function() {
    -      expect(results.data.length).to.be(1);
    -      expect(results.data[0].target).to.be('test{job="testjob"}');
    -    });
    -  });
    -});
    
    From 790aadf8ef3544eb0c1007042525c7ad54f611e2 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Wed, 1 Aug 2018 10:09:05 +0200
    Subject: [PATCH 260/263] Remove angularMocks
    
    ---
     .../app/plugins/datasource/prometheus/specs/_datasource.jest.ts  | 1 -
     1 file changed, 1 deletion(-)
    
    diff --git a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    index 2deab13a1010..efe2738cce94 100644
    --- a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    +++ b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    @@ -1,7 +1,6 @@
     import moment from 'moment';
     import { PrometheusDatasource } from '../datasource';
     import $q from 'q';
    -import { angularMocks } from 'test/lib/common';
     
     const SECOND = 1000;
     const MINUTE = 60 * SECOND;
    
    From 8d0c4cdc09c04a05f20d3988380613a3f9f1e87f Mon Sep 17 00:00:00 2001
    From: Marcus Efraimsson 
    Date: Wed, 1 Aug 2018 12:30:50 +0200
    Subject: [PATCH 261/263] changelog: add notes about closing #12561
    
    [skip ci]
    ---
     CHANGELOG.md | 1 +
     1 file changed, 1 insertion(+)
    
    diff --git a/CHANGELOG.md b/CHANGELOG.md
    index dde7ead6f136..aa089b5900b0 100644
    --- a/CHANGELOG.md
    +++ b/CHANGELOG.md
    @@ -25,6 +25,7 @@
     * **Cloudwatch**: Improved error handling [#12489](https://github.com/grafana/grafana/issues/12489), thx [@mtanda](https://github.com/mtanda)
     * **Cloudwatch**: AppSync metrics and dimensions [#12300](https://github.com/grafana/grafana/issues/12300), thx [@franciscocpg](https://github.com/franciscocpg)
     * **Cloudwatch**: Direct Connect metrics and dimensions [#12762](https://github.com/grafana/grafana/pulls/12762), thx [@mindriot88](https://github.com/mindriot88)
    +* **Cloudwatch**: Added BurstBalance metric to list of AWS RDS metrics [#12561](https://github.com/grafana/grafana/pulls/12561), thx [@activeshadow](https://github.com/activeshadow)
     * **Table**: Adjust header contrast for the light theme [#12668](https://github.com/grafana/grafana/issues/12668)
     * **Elasticsearch**: For alerting/backend, support having index name to the right of pattern in index pattern [#12731](https://github.com/grafana/grafana/issues/12731)
     * **OAuth**: Fix overriding tls_skip_verify_insecure using environment variable [#12747](https://github.com/grafana/grafana/issues/12747), thx [@jangaraj](https://github.com/jangaraj)
    
    From af32bfebefcc02170fbaa4104ae2e5883b5c1ba8 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Wed, 1 Aug 2018 14:26:29 +0200
    Subject: [PATCH 262/263] Add all tests to one file
    
    ---
     .../prometheus/specs/_datasource.jest.ts      | 829 ------------------
     .../prometheus/specs/datasource.jest.ts       | 794 +++++++++++++++++
     2 files changed, 794 insertions(+), 829 deletions(-)
     delete mode 100644 public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    
    diff --git a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    deleted file mode 100644
    index efe2738cce94..000000000000
    --- a/public/app/plugins/datasource/prometheus/specs/_datasource.jest.ts
    +++ /dev/null
    @@ -1,829 +0,0 @@
    -import moment from 'moment';
    -import { PrometheusDatasource } from '../datasource';
    -import $q from 'q';
    -
    -const SECOND = 1000;
    -const MINUTE = 60 * SECOND;
    -const HOUR = 60 * MINUTE;
    -
    -const time = ({ hours = 0, seconds = 0, minutes = 0 }) => moment(hours * HOUR + minutes * MINUTE + seconds * SECOND);
    -
    -let ctx = {};
    -let instanceSettings = {
    -  url: 'proxied',
    -  directUrl: 'direct',
    -  user: 'test',
    -  password: 'mupp',
    -  jsonData: { httpMethod: 'GET' },
    -};
    -let backendSrv = {
    -  datasourceRequest: jest.fn(),
    -};
    -
    -let templateSrv = {
    -  replace: jest.fn(str => str),
    -};
    -
    -let timeSrv = {
    -  timeRange: () => {
    -    return { to: { diff: () => 2000 }, from: '' };
    -  },
    -};
    -
    -describe('PrometheusDatasource', function() {
    -  //   beforeEach(angularMocks.module('grafana.core'));
    -  //   beforeEach(angularMocks.module('grafana.services'));
    -  //   beforeEach(ctx.providePhase(['timeSrv']));
    -
    -  //   beforeEach(
    -  //     angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    -  //       ctx.$q = $q;
    -  //       ctx.$httpBackend = $httpBackend;
    -  //       ctx.$rootScope = $rootScope;
    -  //       ctx.ds = $injector.instantiate(PrometheusDatasource, {
    -  //         instanceSettings: instanceSettings,
    -  //       });
    -  //       $httpBackend.when('GET', /\.html$/).respond('');
    -  //     })
    -  //   );
    -
    -  describe('When querying prometheus with one target using query editor target spec', async () => {
    -    var results;
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -    // Interval alignment with step
    -    var urlExpected =
    -      'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
    -
    -    beforeEach(async () => {
    -      let response = {
    -        data: {
    -          status: 'success',
    -          data: {
    -            resultType: 'matrix',
    -            result: [
    -              {
    -                metric: { __name__: 'test', job: 'testjob' },
    -                values: [[60, '3846']],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -
    -      await ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -    });
    -
    -    it('should generate the correct query', function() {
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should return series list', async () => {
    -      expect(results.data.length).toBe(1);
    -      expect(results.data[0].target).toBe('test{job="testjob"}');
    -    });
    -  });
    -  describe('When querying prometheus with one target which return multiple series', function() {
    -    var results;
    -    var start = 60;
    -    var end = 360;
    -    var step = 60;
    -
    -    var query = {
    -      range: { from: time({ seconds: start }), to: time({ seconds: end }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -
    -    beforeEach(async () => {
    -      let response = {
    -        status: 'success',
    -        data: {
    -          data: {
    -            resultType: 'matrix',
    -            result: [
    -              {
    -                metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    -                values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    -              },
    -              {
    -                metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    -                values: [[start + step * 2, '4846']],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -
    -      await ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -    });
    -
    -    it('should be same length', function() {
    -      expect(results.data.length).toBe(2);
    -      expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
    -      expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
    -    });
    -
    -    it('should fill null until first datapoint in response', function() {
    -      expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
    -      expect(results.data[0].datapoints[0][0]).toBe(null);
    -      expect(results.data[0].datapoints[1][1]).toBe((start + step * 1) * 1000);
    -      expect(results.data[0].datapoints[1][0]).toBe(3846);
    -    });
    -    it('should fill null after last datapoint in response', function() {
    -      var length = (end - start) / step + 1;
    -      expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000);
    -      expect(results.data[0].datapoints[length - 2][0]).toBe(3848);
    -      expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000);
    -      expect(results.data[0].datapoints[length - 1][0]).toBe(null);
    -    });
    -    it('should fill null at gap between series', function() {
    -      expect(results.data[0].datapoints[2][1]).toBe((start + step * 2) * 1000);
    -      expect(results.data[0].datapoints[2][0]).toBe(null);
    -      expect(results.data[1].datapoints[1][1]).toBe((start + step * 1) * 1000);
    -      expect(results.data[1].datapoints[1][0]).toBe(null);
    -      expect(results.data[1].datapoints[3][1]).toBe((start + step * 3) * 1000);
    -      expect(results.data[1].datapoints[3][0]).toBe(null);
    -    });
    -  });
    -  describe('When querying prometheus with one target and instant = true', function() {
    -    var results;
    -    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    -      interval: '60s',
    -    };
    -
    -    beforeEach(async () => {
    -      let response = {
    -        status: 'success',
    -        data: {
    -          data: {
    -            resultType: 'vector',
    -            result: [
    -              {
    -                metric: { __name__: 'test', job: 'testjob' },
    -                value: [123, '3846'],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -
    -      await ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -    });
    -    it('should generate the correct query', function() {
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should return series list', function() {
    -      expect(results.data.length).toBe(1);
    -      expect(results.data[0].target).toBe('test{job="testjob"}');
    -    });
    -  });
    -  describe('When performing annotationQuery', function() {
    -    var results;
    -
    -    var options = {
    -      annotation: {
    -        expr: 'ALERTS{alertstate="firing"}',
    -        tagKeys: 'job',
    -        titleFormat: '{{alertname}}',
    -        textFormat: '{{instance}}',
    -      },
    -      range: {
    -        from: time({ seconds: 63 }),
    -        to: time({ seconds: 123 }),
    -      },
    -    };
    -
    -    beforeEach(async () => {
    -      let response = {
    -        status: 'success',
    -        data: {
    -          data: {
    -            resultType: 'matrix',
    -            result: [
    -              {
    -                metric: {
    -                  __name__: 'ALERTS',
    -                  alertname: 'InstanceDown',
    -                  alertstate: 'firing',
    -                  instance: 'testinstance',
    -                  job: 'testjob',
    -                },
    -                values: [[123, '1']],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -
    -      await ctx.ds.annotationQuery(options).then(function(data) {
    -        results = data;
    -      });
    -    });
    -    it('should return annotation list', function() {
    -      //   ctx.$rootScope.$apply();
    -      expect(results.length).toBe(1);
    -      expect(results[0].tags).toContain('testjob');
    -      expect(results[0].title).toBe('InstanceDown');
    -      expect(results[0].text).toBe('testinstance');
    -      expect(results[0].time).toBe(123 * 1000);
    -    });
    -  });
    -
    -  describe('When resultFormat is table and instant = true', function() {
    -    var results;
    -    // var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    -    var query = {
    -      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    -      interval: '60s',
    -    };
    -
    -    beforeEach(async () => {
    -      let response = {
    -        status: 'success',
    -        data: {
    -          data: {
    -            resultType: 'vector',
    -            result: [
    -              {
    -                metric: { __name__: 'test', job: 'testjob' },
    -                value: [123, '3846'],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -    });
    -
    -    it('should return result', () => {
    -      expect(results).not.toBe(null);
    -    });
    -  });
    -
    -  describe('The "step" query parameter', function() {
    -    var response = {
    -      status: 'success',
    -      data: {
    -        data: {
    -          resultType: 'matrix',
    -          result: [],
    -        },
    -      },
    -    };
    -
    -    it('should be min interval when greater than auto interval', async () => {
    -      let query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '10s',
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      let urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -
    -    it('step should never go below 1', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [{ expr: 'test' }],
    -        interval: '100ms',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -
    -    it('should be auto interval when greater than min interval', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '5s',
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should result in querying fewer than 11000 data points', async () => {
    -      var query = {
    -        // 6 hour range
    -        range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
    -        targets: [{ expr: 'test' }],
    -        interval: '1s',
    -      };
    -      var end = 7 * 60 * 60;
    -      var start = 60 * 60;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should not apply min interval when interval * intervalFactor greater', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '10s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      // times get rounded up to interval
    -      var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should apply min interval when interval * intervalFactor smaller', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '15s',
    -            intervalFactor: 2,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should apply intervalFactor to auto interval when greater', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            interval: '5s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      // times get aligned to interval
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should not not be affected by the 11000 data points limit when large enough', async () => {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -    it('should be determined by the 11000 data points limit when too small', async () => {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'test',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -    });
    -  });
    -
    -  describe('The __interval and __interval_ms template variables', function() {
    -    var response = {
    -      status: 'success',
    -      data: {
    -        data: {
    -          resultType: 'matrix',
    -          result: [],
    -        },
    -      },
    -    };
    -
    -    it('should be unchanged when auto interval is greater than min interval', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '5s',
    -          },
    -        ],
    -        interval: '10s',
    -        scopedVars: {
    -          __interval: { text: '10s', value: '10s' },
    -          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    -        },
    -      };
    -
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=60&end=420&step=10';
    -
    -      templateSrv.replace = jest.fn(str => str);
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '10s',
    -          value: '10s',
    -        },
    -        __interval_ms: {
    -          text: 10000,
    -          value: 10000,
    -        },
    -      });
    -    });
    -    it('should be min interval when it is greater than auto interval', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '10s',
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=60&end=420&step=10';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      templateSrv.replace = jest.fn(str => str);
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '5s',
    -          value: '5s',
    -        },
    -        __interval_ms: {
    -          text: 5000,
    -          value: 5000,
    -        },
    -      });
    -    });
    -    it('should account for intervalFactor', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '5s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '10s',
    -        scopedVars: {
    -          __interval: { text: '10s', value: '10s' },
    -          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=0&end=500&step=100';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      templateSrv.replace = jest.fn(str => str);
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '10s',
    -          value: '10s',
    -        },
    -        __interval_ms: {
    -          text: 10000,
    -          value: 10000,
    -        },
    -      });
    -
    -      expect(query.scopedVars.__interval.text).toBe('10s');
    -      expect(query.scopedVars.__interval.value).toBe('10s');
    -      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    -      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
    -    });
    -    it('should be interval * intervalFactor when greater than min interval', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '10s',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=50&end=450&step=50';
    -
    -      templateSrv.replace = jest.fn(str => str);
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '5s',
    -          value: '5s',
    -        },
    -        __interval_ms: {
    -          text: 5000,
    -          value: 5000,
    -        },
    -      });
    -    });
    -    it('should be min interval when greater than interval * intervalFactor', async () => {
    -      var query = {
    -        // 6 minute range
    -        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            interval: '15s',
    -            intervalFactor: 2,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=60&end=420&step=15';
    -
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '5s',
    -          value: '5s',
    -        },
    -        __interval_ms: {
    -          text: 5000,
    -          value: 5000,
    -        },
    -      });
    -    });
    -    it('should be determined by the 11000 data points limit, accounting for intervalFactor', async () => {
    -      var query = {
    -        // 1 week range
    -        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    -        targets: [
    -          {
    -            expr: 'rate(test[$__interval])',
    -            intervalFactor: 10,
    -          },
    -        ],
    -        interval: '5s',
    -        scopedVars: {
    -          __interval: { text: '5s', value: '5s' },
    -          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    -        },
    -      };
    -      var end = 7 * 24 * 60 * 60;
    -      var start = 0;
    -      var urlExpected =
    -        'proxied/api/v1/query_range?query=' +
    -        encodeURIComponent('rate(test[$__interval])') +
    -        '&start=' +
    -        start +
    -        '&end=' +
    -        end +
    -        '&step=60';
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      templateSrv.replace = jest.fn(str => str);
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query);
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('GET');
    -      expect(res.url).toBe(urlExpected);
    -
    -      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    -        __interval: {
    -          text: '5s',
    -          value: '5s',
    -        },
    -        __interval_ms: {
    -          text: 5000,
    -          value: 5000,
    -        },
    -      });
    -    });
    -  });
    -});
    -
    -describe('PrometheusDatasource for POST', function() {
    -  //   var ctx = new helpers.ServiceTestContext();
    -  let instanceSettings = {
    -    url: 'proxied',
    -    directUrl: 'direct',
    -    user: 'test',
    -    password: 'mupp',
    -    jsonData: { httpMethod: 'POST' },
    -  };
    -
    -  //   beforeEach(angularMocks.module('grafana.core'));
    -  //   beforeEach(angularMocks.module('grafana.services'));
    -  //   beforeEach(ctx.providePhase(['timeSrv']));
    -
    -  //   beforeEach(
    -  //     // angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
    -  //     //   ctx.$q = $q;
    -  //     //   ctx.$httpBackend = $httpBackend;
    -  //     //   ctx.$rootScope = $rootScope;
    -  //     //   ctx.ds = $injector.instantiate(PrometheusDatasource, { instanceSettings: instanceSettings });
    -  //     //   $httpBackend.when('GET', /\.html$/).respond('');
    -  //     // })
    -  //   );
    -
    -  describe('When querying prometheus with one target using query editor target spec', function() {
    -    var results;
    -    var urlExpected = 'proxied/api/v1/query_range';
    -    var dataExpected = {
    -      query: 'test{job="testjob"}',
    -      start: 1 * 60,
    -      end: 3 * 60,
    -      step: 60,
    -    };
    -    var query = {
    -      range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
    -      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    -      interval: '60s',
    -    };
    -
    -    beforeEach(async () => {
    -      let response = {
    -        status: 'success',
    -        data: {
    -          data: {
    -            resultType: 'matrix',
    -            result: [
    -              {
    -                metric: { __name__: 'test', job: 'testjob' },
    -                values: [[2 * 60, '3846']],
    -              },
    -            ],
    -          },
    -        },
    -      };
    -      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    -      ctx.ds = new PrometheusDatasource(instanceSettings, $q, backendSrv, templateSrv, timeSrv);
    -      await ctx.ds.query(query).then(function(data) {
    -        results = data;
    -      });
    -    });
    -    it('should generate the correct query', function() {
    -      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    -      expect(res.method).toBe('POST');
    -      expect(res.url).toBe(urlExpected);
    -      expect(res.data).toEqual(dataExpected);
    -    });
    -    it('should return series list', function() {
    -      expect(results.data.length).toBe(1);
    -      expect(results.data[0].target).toBe('test{job="testjob"}');
    -    });
    -  });
    -});
    diff --git a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    index b8b2b50f5909..f60af583f451 100644
    --- a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    +++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    @@ -246,3 +246,797 @@ describe('PrometheusDatasource', () => {
         });
       });
     });
    +
    +const SECOND = 1000;
    +const MINUTE = 60 * SECOND;
    +const HOUR = 60 * MINUTE;
    +
    +const time = ({ hours = 0, seconds = 0, minutes = 0 }) => moment(hours * HOUR + minutes * MINUTE + seconds * SECOND);
    +
    +let ctx = {};
    +let instanceSettings = {
    +  url: 'proxied',
    +  directUrl: 'direct',
    +  user: 'test',
    +  password: 'mupp',
    +  jsonData: { httpMethod: 'GET' },
    +};
    +let backendSrv = {
    +  datasourceRequest: jest.fn(),
    +};
    +
    +let templateSrv = {
    +  replace: jest.fn(str => str),
    +};
    +
    +let timeSrv = {
    +  timeRange: () => {
    +    return { to: { diff: () => 2000 }, from: '' };
    +  },
    +};
    +
    +describe('PrometheusDatasource', function() {
    +  describe('When querying prometheus with one target using query editor target spec', async () => {
    +    var results;
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 183 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +    // Interval alignment with step
    +    var urlExpected =
    +      'proxied/api/v1/query_range?query=' + encodeURIComponent('test{job="testjob"}') + '&start=60&end=240&step=60';
    +
    +    beforeEach(async () => {
    +      let response = {
    +        data: {
    +          status: 'success',
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                values: [[60, '3846']],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +
    +    it('should generate the correct query', function() {
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should return series list', async () => {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +  describe('When querying prometheus with one target which return multiple series', function() {
    +    var results;
    +    var start = 60;
    +    var end = 360;
    +    var step = 60;
    +
    +    var query = {
    +      range: { from: time({ seconds: start }), to: time({ seconds: end }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
    +        data: {
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob', series: 'series 1' },
    +                values: [[start + step * 1, '3846'], [start + step * 3, '3847'], [end - step * 1, '3848']],
    +              },
    +              {
    +                metric: { __name__: 'test', job: 'testjob', series: 'series 2' },
    +                values: [[start + step * 2, '4846']],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +
    +    it('should be same length', function() {
    +      expect(results.data.length).toBe(2);
    +      expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
    +      expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
    +    });
    +
    +    it('should fill null until first datapoint in response', function() {
    +      expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
    +      expect(results.data[0].datapoints[0][0]).toBe(null);
    +      expect(results.data[0].datapoints[1][1]).toBe((start + step * 1) * 1000);
    +      expect(results.data[0].datapoints[1][0]).toBe(3846);
    +    });
    +    it('should fill null after last datapoint in response', function() {
    +      var length = (end - start) / step + 1;
    +      expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000);
    +      expect(results.data[0].datapoints[length - 2][0]).toBe(3848);
    +      expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000);
    +      expect(results.data[0].datapoints[length - 1][0]).toBe(null);
    +    });
    +    it('should fill null at gap between series', function() {
    +      expect(results.data[0].datapoints[2][1]).toBe((start + step * 2) * 1000);
    +      expect(results.data[0].datapoints[2][0]).toBe(null);
    +      expect(results.data[1].datapoints[1][1]).toBe((start + step * 1) * 1000);
    +      expect(results.data[1].datapoints[1][0]).toBe(null);
    +      expect(results.data[1].datapoints[3][1]).toBe((start + step * 3) * 1000);
    +      expect(results.data[1].datapoints[3][0]).toBe(null);
    +    });
    +  });
    +  describe('When querying prometheus with one target and instant = true', function() {
    +    var results;
    +    var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    +      interval: '60s',
    +    };
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
    +        data: {
    +          data: {
    +            resultType: 'vector',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                value: [123, '3846'],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should generate the correct query', function() {
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should return series list', function() {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +  describe('When performing annotationQuery', function() {
    +    var results;
    +
    +    var options = {
    +      annotation: {
    +        expr: 'ALERTS{alertstate="firing"}',
    +        tagKeys: 'job',
    +        titleFormat: '{{alertname}}',
    +        textFormat: '{{instance}}',
    +      },
    +      range: {
    +        from: time({ seconds: 63 }),
    +        to: time({ seconds: 123 }),
    +      },
    +    };
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
    +        data: {
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: {
    +                  __name__: 'ALERTS',
    +                  alertname: 'InstanceDown',
    +                  alertstate: 'firing',
    +                  instance: 'testinstance',
    +                  job: 'testjob',
    +                },
    +                values: [[123, '1']],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +
    +      await ctx.ds.annotationQuery(options).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should return annotation list', function() {
    +      expect(results.length).toBe(1);
    +      expect(results[0].tags).toContain('testjob');
    +      expect(results[0].title).toBe('InstanceDown');
    +      expect(results[0].text).toBe('testinstance');
    +      expect(results[0].time).toBe(123 * 1000);
    +    });
    +  });
    +
    +  describe('When resultFormat is table and instant = true', function() {
    +    var results;
    +    var query = {
    +      range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series', instant: true }],
    +      interval: '60s',
    +    };
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
    +        data: {
    +          data: {
    +            resultType: 'vector',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                value: [123, '3846'],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +
    +    it('should return result', () => {
    +      expect(results).not.toBe(null);
    +    });
    +  });
    +
    +  describe('The "step" query parameter', function() {
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [],
    +        },
    +      },
    +    };
    +
    +    it('should be min interval when greater than auto interval', async () => {
    +      let query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '10s',
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      let urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +
    +    it('step should never go below 1', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [{ expr: 'test' }],
    +        interval: '100ms',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=1';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +
    +    it('should be auto interval when greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '5s',
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=60&end=420&step=10';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should result in querying fewer than 11000 data points', async () => {
    +      var query = {
    +        // 6 hour range
    +        range: { from: time({ hours: 1 }), to: time({ hours: 7 }) },
    +        targets: [{ expr: 'test' }],
    +        interval: '1s',
    +      };
    +      var end = 7 * 60 * 60;
    +      var start = 60 * 60;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=' + start + '&end=' + end + '&step=2';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should not apply min interval when interval * intervalFactor greater', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '10s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      // times get rounded up to interval
    +      var urlExpected = 'proxied/api/v1/query_range?query=test&start=50&end=450&step=50';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should apply min interval when interval * intervalFactor smaller', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '15s',
    +            intervalFactor: 2,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=60&end=420&step=15';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should apply intervalFactor to auto interval when greater', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            interval: '5s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      // times get aligned to interval
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=0&end=500&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should not be affected by the 11000 data points limit when large enough', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +    it('should be determined by the 11000 data points limit when too small', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'test',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected = 'proxied/api/v1/query_range?query=test' + '&start=' + start + '&end=' + end + '&step=60';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +    });
    +  });
    +
    +  describe('The __interval and __interval_ms template variables', function() {
    +    var response = {
    +      status: 'success',
    +      data: {
    +        data: {
    +          resultType: 'matrix',
    +          result: [],
    +        },
    +      },
    +    };
    +
    +    it('should be unchanged when auto interval is greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '5s',
    +          },
    +        ],
    +        interval: '10s',
    +        scopedVars: {
    +          __interval: { text: '10s', value: '10s' },
    +          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    +        },
    +      };
    +
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=10';
    +
    +      templateSrv.replace = jest.fn(str => str);
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '10s',
    +          value: '10s',
    +        },
    +        __interval_ms: {
    +          text: 10000,
    +          value: 10000,
    +        },
    +      });
    +    });
    +    it('should be min interval when it is greater than auto interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '10s',
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=10';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
    +    });
    +    it('should account for intervalFactor', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '5s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '10s',
    +        scopedVars: {
    +          __interval: { text: '10s', value: '10s' },
    +          __interval_ms: { text: 10 * 1000, value: 10 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=0&end=500&step=100';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '10s',
    +          value: '10s',
    +        },
    +        __interval_ms: {
    +          text: 10000,
    +          value: 10000,
    +        },
    +      });
    +
    +      expect(query.scopedVars.__interval.text).toBe('10s');
    +      expect(query.scopedVars.__interval.value).toBe('10s');
    +      expect(query.scopedVars.__interval_ms.text).toBe(10 * 1000);
    +      expect(query.scopedVars.__interval_ms.value).toBe(10 * 1000);
    +    });
    +    it('should be interval * intervalFactor when greater than min interval', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '10s',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=50&end=450&step=50';
    +
    +      templateSrv.replace = jest.fn(str => str);
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
    +    });
    +    it('should be min interval when greater than interval * intervalFactor', async () => {
    +      var query = {
    +        // 6 minute range
    +        range: { from: time({ minutes: 1 }), to: time({ minutes: 7 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            interval: '15s',
    +            intervalFactor: 2,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=60&end=420&step=15';
    +
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
    +    });
    +    it('should be determined by the 11000 data points limit, accounting for intervalFactor', async () => {
    +      var query = {
    +        // 1 week range
    +        range: { from: time({}), to: time({ hours: 7 * 24 }) },
    +        targets: [
    +          {
    +            expr: 'rate(test[$__interval])',
    +            intervalFactor: 10,
    +          },
    +        ],
    +        interval: '5s',
    +        scopedVars: {
    +          __interval: { text: '5s', value: '5s' },
    +          __interval_ms: { text: 5 * 1000, value: 5 * 1000 },
    +        },
    +      };
    +      var end = 7 * 24 * 60 * 60;
    +      var start = 0;
    +      var urlExpected =
    +        'proxied/api/v1/query_range?query=' +
    +        encodeURIComponent('rate(test[$__interval])') +
    +        '&start=' +
    +        start +
    +        '&end=' +
    +        end +
    +        '&step=60';
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      templateSrv.replace = jest.fn(str => str);
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query);
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('GET');
    +      expect(res.url).toBe(urlExpected);
    +
    +      expect(templateSrv.replace.mock.calls[0][1]).toEqual({
    +        __interval: {
    +          text: '5s',
    +          value: '5s',
    +        },
    +        __interval_ms: {
    +          text: 5000,
    +          value: 5000,
    +        },
    +      });
    +    });
    +  });
    +});
    +
    +describe('PrometheusDatasource for POST', function() {
    +  //   var ctx = new helpers.ServiceTestContext();
    +  let instanceSettings = {
    +    url: 'proxied',
    +    directUrl: 'direct',
    +    user: 'test',
    +    password: 'mupp',
    +    jsonData: { httpMethod: 'POST' },
    +  };
    +
    +  describe('When querying prometheus with one target using query editor target spec', function() {
    +    var results;
    +    var urlExpected = 'proxied/api/v1/query_range';
    +    var dataExpected = {
    +      query: 'test{job="testjob"}',
    +      start: 1 * 60,
    +      end: 3 * 60,
    +      step: 60,
    +    };
    +    var query = {
    +      range: { from: time({ minutes: 1, seconds: 3 }), to: time({ minutes: 2, seconds: 3 }) },
    +      targets: [{ expr: 'test{job="testjob"}', format: 'time_series' }],
    +      interval: '60s',
    +    };
    +
    +    beforeEach(async () => {
    +      let response = {
    +        status: 'success',
    +        data: {
    +          data: {
    +            resultType: 'matrix',
    +            result: [
    +              {
    +                metric: { __name__: 'test', job: 'testjob' },
    +                values: [[2 * 60, '3846']],
    +              },
    +            ],
    +          },
    +        },
    +      };
    +      backendSrv.datasourceRequest = jest.fn(() => Promise.resolve(response));
    +      ctx.ds = new PrometheusDatasource(instanceSettings, q, backendSrv, templateSrv, timeSrv);
    +      await ctx.ds.query(query).then(function(data) {
    +        results = data;
    +      });
    +    });
    +    it('should generate the correct query', function() {
    +      let res = backendSrv.datasourceRequest.mock.calls[0][0];
    +      expect(res.method).toBe('POST');
    +      expect(res.url).toBe(urlExpected);
    +      expect(res.data).toEqual(dataExpected);
    +    });
    +    it('should return series list', function() {
    +      expect(results.data.length).toBe(1);
    +      expect(results.data[0].target).toBe('test{job="testjob"}');
    +    });
    +  });
    +});
    
    From 951b623bd23ca1aa43833e2898876579c8417370 Mon Sep 17 00:00:00 2001
    From: Tobias Skarhed 
    Date: Wed, 1 Aug 2018 14:27:45 +0200
    Subject: [PATCH 263/263] Change to arrow functions
    
    ---
     .../prometheus/specs/datasource.jest.ts       | 66 +++++++++----------
     1 file changed, 33 insertions(+), 33 deletions(-)
    
    diff --git a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    index f60af583f451..aeca8d691913 100644
    --- a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    +++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts
    @@ -150,49 +150,49 @@ describe('PrometheusDatasource', () => {
         });
       });
     
    -  describe('alignRange', function() {
    -    it('does not modify already aligned intervals with perfect step', function() {
    +  describe('alignRange', () => {
    +    it('does not modify already aligned intervals with perfect step', () => {
           const range = alignRange(0, 3, 3);
           expect(range.start).toEqual(0);
           expect(range.end).toEqual(3);
         });
    -    it('does modify end-aligned intervals to reflect number of steps possible', function() {
    +    it('does modify end-aligned intervals to reflect number of steps possible', () => {
           const range = alignRange(1, 6, 3);
           expect(range.start).toEqual(0);
           expect(range.end).toEqual(6);
         });
    -    it('does align intervals that are a multiple of steps', function() {
    +    it('does align intervals that are a multiple of steps', () => {
           const range = alignRange(1, 4, 3);
           expect(range.start).toEqual(0);
           expect(range.end).toEqual(6);
         });
    -    it('does align intervals that are not a multiple of steps', function() {
    +    it('does align intervals that are not a multiple of steps', () => {
           const range = alignRange(1, 5, 3);
           expect(range.start).toEqual(0);
           expect(range.end).toEqual(6);
         });
       });
     
    -  describe('Prometheus regular escaping', function() {
    -    it('should not escape non-string', function() {
    +  describe('Prometheus regular escaping', () => {
    +    it('should not escape non-string', () => {
           expect(prometheusRegularEscape(12)).toEqual(12);
         });
    -    it('should not escape simple string', function() {
    +    it('should not escape simple string', () => {
           expect(prometheusRegularEscape('cryptodepression')).toEqual('cryptodepression');
         });
    -    it("should escape '", function() {
    +    it("should escape '", () => {
           expect(prometheusRegularEscape("looking'glass")).toEqual("looking\\\\'glass");
         });
    -    it('should escape multiple characters', function() {
    +    it('should escape multiple characters', () => {
           expect(prometheusRegularEscape("'looking'glass'")).toEqual("\\\\'looking\\\\'glass\\\\'");
         });
       });
     
    -  describe('Prometheus regexes escaping', function() {
    -    it('should not escape simple string', function() {
    +  describe('Prometheus regexes escaping', () => {
    +    it('should not escape simple string', () => {
           expect(prometheusSpecialRegexEscape('cryptodepression')).toEqual('cryptodepression');
         });
    -    it('should escape $^*+?.()\\', function() {
    +    it('should escape $^*+?.()\\', () => {
           expect(prometheusSpecialRegexEscape("looking'glass")).toEqual("looking\\\\'glass");
           expect(prometheusSpecialRegexEscape('looking{glass')).toEqual('looking\\\\{glass');
           expect(prometheusSpecialRegexEscape('looking}glass')).toEqual('looking\\\\}glass');
    @@ -208,7 +208,7 @@ describe('PrometheusDatasource', () => {
           expect(prometheusSpecialRegexEscape('looking)glass')).toEqual('looking\\\\)glass');
           expect(prometheusSpecialRegexEscape('looking\\glass')).toEqual('looking\\\\\\\\glass');
         });
    -    it('should escape multiple special characters', function() {
    +    it('should escape multiple special characters', () => {
           expect(prometheusSpecialRegexEscape('+looking$glass?')).toEqual('\\\\+looking\\\\$glass\\\\?');
         });
       });
    @@ -275,7 +275,7 @@ let timeSrv = {
       },
     };
     
    -describe('PrometheusDatasource', function() {
    +describe('PrometheusDatasource', () => {
       describe('When querying prometheus with one target using query editor target spec', async () => {
         var results;
         var query = {
    @@ -310,7 +310,7 @@ describe('PrometheusDatasource', function() {
           });
         });
     
    -    it('should generate the correct query', function() {
    +    it('should generate the correct query', () => {
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
    @@ -320,7 +320,7 @@ describe('PrometheusDatasource', function() {
           expect(results.data[0].target).toBe('test{job="testjob"}');
         });
       });
    -  describe('When querying prometheus with one target which return multiple series', function() {
    +  describe('When querying prometheus with one target which returns multiple series', () => {
         var results;
         var start = 60;
         var end = 360;
    @@ -360,26 +360,26 @@ describe('PrometheusDatasource', function() {
           });
         });
     
    -    it('should be same length', function() {
    +    it('should be same length', () => {
           expect(results.data.length).toBe(2);
           expect(results.data[0].datapoints.length).toBe((end - start) / step + 1);
           expect(results.data[1].datapoints.length).toBe((end - start) / step + 1);
         });
     
    -    it('should fill null until first datapoint in response', function() {
    +    it('should fill null until first datapoint in response', () => {
           expect(results.data[0].datapoints[0][1]).toBe(start * 1000);
           expect(results.data[0].datapoints[0][0]).toBe(null);
           expect(results.data[0].datapoints[1][1]).toBe((start + step * 1) * 1000);
           expect(results.data[0].datapoints[1][0]).toBe(3846);
         });
    -    it('should fill null after last datapoint in response', function() {
    +    it('should fill null after last datapoint in response', () => {
           var length = (end - start) / step + 1;
           expect(results.data[0].datapoints[length - 2][1]).toBe((end - step * 1) * 1000);
           expect(results.data[0].datapoints[length - 2][0]).toBe(3848);
           expect(results.data[0].datapoints[length - 1][1]).toBe(end * 1000);
           expect(results.data[0].datapoints[length - 1][0]).toBe(null);
         });
    -    it('should fill null at gap between series', function() {
    +    it('should fill null at gap between series', () => {
           expect(results.data[0].datapoints[2][1]).toBe((start + step * 2) * 1000);
           expect(results.data[0].datapoints[2][0]).toBe(null);
           expect(results.data[1].datapoints[1][1]).toBe((start + step * 1) * 1000);
    @@ -388,7 +388,7 @@ describe('PrometheusDatasource', function() {
           expect(results.data[1].datapoints[3][0]).toBe(null);
         });
       });
    -  describe('When querying prometheus with one target and instant = true', function() {
    +  describe('When querying prometheus with one target and instant = true', () => {
         var results;
         var urlExpected = 'proxied/api/v1/query?query=' + encodeURIComponent('test{job="testjob"}') + '&time=123';
         var query = {
    @@ -420,17 +420,17 @@ describe('PrometheusDatasource', function() {
             results = data;
           });
         });
    -    it('should generate the correct query', function() {
    +    it('should generate the correct query', () => {
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('GET');
           expect(res.url).toBe(urlExpected);
         });
    -    it('should return series list', function() {
    +    it('should return series list', () => {
           expect(results.data.length).toBe(1);
           expect(results.data[0].target).toBe('test{job="testjob"}');
         });
       });
    -  describe('When performing annotationQuery', function() {
    +  describe('When performing annotationQuery', () => {
         var results;
     
         var options = {
    @@ -475,7 +475,7 @@ describe('PrometheusDatasource', function() {
             results = data;
           });
         });
    -    it('should return annotation list', function() {
    +    it('should return annotation list', () => {
           expect(results.length).toBe(1);
           expect(results[0].tags).toContain('testjob');
           expect(results[0].title).toBe('InstanceDown');
    @@ -484,7 +484,7 @@ describe('PrometheusDatasource', function() {
         });
       });
     
    -  describe('When resultFormat is table and instant = true', function() {
    +  describe('When resultFormat is table and instant = true', () => {
         var results;
         var query = {
           range: { from: time({ seconds: 63 }), to: time({ seconds: 123 }) },
    @@ -520,7 +520,7 @@ describe('PrometheusDatasource', function() {
         });
       });
     
    -  describe('The "step" query parameter', function() {
    +  describe('The "step" query parameter', () => {
         var response = {
           status: 'success',
           data: {
    @@ -717,7 +717,7 @@ describe('PrometheusDatasource', function() {
         });
       });
     
    -  describe('The __interval and __interval_ms template variables', function() {
    +  describe('The __interval and __interval_ms template variables', () => {
         var response = {
           status: 'success',
           data: {
    @@ -982,7 +982,7 @@ describe('PrometheusDatasource', function() {
       });
     });
     
    -describe('PrometheusDatasource for POST', function() {
    +describe('PrometheusDatasource for POST', () => {
       //   var ctx = new helpers.ServiceTestContext();
       let instanceSettings = {
         url: 'proxied',
    @@ -992,7 +992,7 @@ describe('PrometheusDatasource for POST', function() {
         jsonData: { httpMethod: 'POST' },
       };
     
    -  describe('When querying prometheus with one target using query editor target spec', function() {
    +  describe('When querying prometheus with one target using query editor target spec', () => {
         var results;
         var urlExpected = 'proxied/api/v1/query_range';
         var dataExpected = {
    @@ -1028,13 +1028,13 @@ describe('PrometheusDatasource for POST', function() {
             results = data;
           });
         });
    -    it('should generate the correct query', function() {
    +    it('should generate the correct query', () => {
           let res = backendSrv.datasourceRequest.mock.calls[0][0];
           expect(res.method).toBe('POST');
           expect(res.url).toBe(urlExpected);
           expect(res.data).toEqual(dataExpected);
         });
    -    it('should return series list', function() {
    +    it('should return series list', () => {
           expect(results.data.length).toBe(1);
           expect(results.data[0].target).toBe('test{job="testjob"}');
         });