Reestablish compatibility with Grafana 6
amotl committed Jan 22, 2022
1 parent dc9ac0f commit 1c7e2a8
Showing 6 changed files with 21 additions and 16 deletions.
1 change: 1 addition & 0 deletions CHANGES.rst
@@ -10,6 +10,7 @@ in progress
 - CI: Prepare test suite for testing two different dashboard schema versions, v27 and v33
 - Improve determinism by returning stable sort order of dashboard results
 - Improve compatibility with Grafana 8.3 by handling dashboard schema version 33 properly
+- Reestablish compatibility with Grafana 6

2021-12-11 0.12.0
=================
20 changes: 11 additions & 9 deletions grafana_wtf/core.py
@@ -12,7 +12,7 @@
 import colored
 import requests
 import requests_cache
-from munch import Munch, munchify
+from munch import Munch, munchify, unmunchify
 from tqdm import tqdm
 
 from grafana_wtf.model import (
@@ -392,9 +392,8 @@ def explore_datasources(self):
         # Compute list of exploration items, associating datasources with the dashboards that use them.
         results_used = []
         results_unused = []
-        for ds_identifier in sorted(ix.datasource_by_ident):
-
-            datasource = ix.datasource_by_ident[ds_identifier]
+        for datasource in ix.datasources:
+            ds_identifier = datasource.get("uid", datasource.get("name"))
             dashboard_uids = ix.datasource_dashboard_index.get(ds_identifier, [])
             dashboards = list(map(ix.dashboard_by_uid.get, dashboard_uids))
             item = DatasourceExplorationItem(datasource=datasource, used_in=dashboards, grafana_url=self.grafana_url)
@@ -405,7 +404,8 @@ def explore_datasources(self):
             if dashboard_uids:
                 results_used.append(result)
             else:
-                results_unused.append(result)
+                if result not in results_unused:
+                    results_unused.append(result)
 
         results_used = sorted(results_used, key=lambda x: x["datasource"]["name"] or x["datasource"]["uid"])
         results_unused = sorted(results_unused, key=lambda x: x["datasource"]["name"] or x["datasource"]["uid"])
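
With this change, the exploration loop walks the raw datasource records and derives its lookup key with a uid-to-name fallback, so Grafana 6 datasources, which do not expose a `uid`, are still matched against the dashboard index; the extra membership check keeps the unused list free of duplicates. A minimal sketch of the fallback, using invented records:

# Minimal sketch of the uid-to-name fallback; the sample records are hypothetical.
def ds_identifier(datasource: dict) -> str:
    # Prefer the uid, fall back to the name when the uid is absent (Grafana 6).
    return datasource.get("uid", datasource.get("name"))

assert ds_identifier({"uid": "PD12345", "name": "Prometheus"}) == "PD12345"
assert ds_identifier({"name": "Graphite"}) == "Graphite"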
@@ -530,16 +530,18 @@ def index_datasources(self):
         self.datasource_dashboard_index = {}
 
         for datasource in self.datasources:
-            datasource_name_or_uid = datasource.uid or datasource.name
-            self.datasource_by_ident[datasource_name_or_uid] = datasource
-            self.datasource_by_uid[datasource.uid] = datasource
+            self.datasource_by_ident[datasource.name] = datasource
             self.datasource_by_name[datasource.name] = datasource
+            if "uid" in datasource:
+                self.datasource_by_ident[datasource.uid] = datasource
+                self.datasource_by_uid[datasource.uid] = datasource
 
         for dashboard_uid, datasource_items in self.dashboard_datasource_index.items():
             datasource_item: DatasourceItem
             for datasource_item in datasource_items:
                 datasource_name_or_uid = datasource_item.uid or datasource_item.name
                 if datasource_name_or_uid in self.datasource_by_name:
-                    datasource_name_or_uid = self.datasource_by_name[datasource_name_or_uid].uid
+                    if "uid" in self.datasource_by_name[datasource_name_or_uid]:
+                        datasource_name_or_uid = self.datasource_by_name[datasource_name_or_uid].uid
                 self.datasource_dashboard_index.setdefault(datasource_name_or_uid, [])
                 self.datasource_dashboard_index[datasource_name_or_uid].append(dashboard_uid)
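
The index now always registers a datasource under its name and only additionally under its uid when the record carries one, and the same guard applies when a dashboard's by-name datasource reference is upgraded to a uid key. A condensed sketch of that resolution step, with hypothetical records:

# Sketch of the guarded name-to-uid resolution; the records are invented.
datasource_by_name = {
    "Prometheus": {"name": "Prometheus", "uid": "PD12345"},  # Grafana 8: has a uid
    "Graphite": {"name": "Graphite"},                        # Grafana 6: no uid
}

def resolve(reference: str) -> str:
    key = reference
    if key in datasource_by_name and "uid" in datasource_by_name[key]:
        # Upgrade the key to the uid only when the datasource actually has one.
        key = datasource_by_name[key]["uid"]
    return key

assert resolve("Prometheus") == "PD12345"   # indexed under the uid
assert resolve("Graphite") == "Graphite"    # stays indexed under the name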
4 changes: 2 additions & 2 deletions grafana_wtf/model.py
@@ -68,7 +68,7 @@ class DatasourceExplorationItem:
 
     def format_compact(self):
         dsshort = OrderedDict(
-            uid=self.datasource.uid,
+            uid=self.datasource.get("uid"),
             name=self.datasource.name,
             type=self.datasource.type,
             url=self.datasource.url,
@@ -103,7 +103,7 @@ def format_compact(self):
         for datasource in self.datasources:
             item.setdefault("datasources", [])
             dsshort = OrderedDict(
-                uid=datasource.uid,
+                uid=datasource.get("uid"),
                 name=datasource.name,
                 type=datasource.type,
                 url=datasource.url,
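
The switch from attribute access to `.get("uid")` matters because the datasources are `Munch` objects: attribute access on a key that is absent, as `uid` is on Grafana 6, raises `AttributeError`, while `.get()` degrades to `None`. A small illustration, assuming only the `munch` package:

from munch import munchify

# A datasource record without a uid, as on Grafana 6 (illustrative values).
datasource = munchify({"name": "Graphite", "type": "graphite"})

assert datasource.name == "Graphite"   # present keys work via attribute access
assert datasource.get("uid") is None   # absent keys degrade gracefully

try:
    datasource.uid                     # attribute access on the absent key
except AttributeError:
    pass                               # this is the failure mode the change avoids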
2 changes: 2 additions & 0 deletions tests/.env
@@ -1 +1,3 @@
+#GRAFANA_VERSION=6.7.6
+#GRAFANA_VERSION=7.5.12
 GRAFANA_VERSION=8.3.1
4 changes: 2 additions & 2 deletions tests/conftest.py
@@ -1,5 +1,5 @@
 import os
-import sys
+import re
 from pathlib import Path
 
 import pytest
@@ -50,7 +50,7 @@ def _create_datasource(name: str, type: str, access: str):
         try:
             grafana.datasource.create_datasource(dict(name=name, type=type, access=access))
         except GrafanaClientError as ex:
-            if "Client Error 409: data source with the same name already exists" not in str(ex):
+            if not re.match("Client Error 409: Data source with (the )?same name already exists", str(ex), re.IGNORECASE):
                 raise
 
     return _create_datasource
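
The 409 response wording differs across Grafana releases (casing and the article "the" vary), so the fixture now matches it with a tolerant, case-insensitive pattern instead of a fixed substring. A quick check of the two wordings the pattern is meant to accept, reconstructed from the old literal and the new regex; exact messages may differ per release:

import re

pattern = "Client Error 409: Data source with (the )?same name already exists"

# Two plausible message variants, e.g. from Grafana 8 and Grafana 6.
messages = [
    "Client Error 409: data source with the same name already exists",
    "Client Error 409: Data source with same name already exists",
]

for message in messages:
    assert re.match(pattern, message, re.IGNORECASE)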
6 changes: 3 additions & 3 deletions tests/test_commands.py
@@ -223,17 +223,17 @@ def test_explore_dashboards(docker_grafana, create_datasource, capsys, caplog):
 
     missing = find_all_missing_datasources(data)
 
-    # Those are bogus!
+    # FIXME: Those are coming from a bogus migration from schema version 27 to 33.
     assert missing[0]["name"] == "weatherbase"
-    assert missing[1]["uid"] == "weatherbase"
+    # assert missing[1]["uid"] == "weatherbase"
 
 
 def find_all_missing_datasources(data):
     missing_items = []
     for item in data:
         if "datasources_missing" in item:
             missing_items += item["datasources_missing"]
-    return sorted(missing_items, key=lambda x: x["name"] or x["uid"])
+    return missing_items
 
 
 def test_info(docker_grafana, capsys, caplog):
