Skip to content

Commit

Permalink
Merge pull request #30 from mullerpeter/feature/auto-completion
Browse files Browse the repository at this point in the history
✨ Feat: add auto completion to query editor
  • Loading branch information
mullerpeter committed Sep 20, 2023
2 parents 994452b + 15c1384 commit f9d6a1b
Show file tree
Hide file tree
Showing 19 changed files with 1,520 additions and 229 deletions.
11 changes: 10 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,15 @@
# Changelog

## 1.1.8
## 1.2.0

- Feature: Add support for auto complete suggestions in query editor
- Experimental Feature, disabled by default (Can be enabled in Datasource Settings)
- No suitable existing library was available to generate suggestions for Databricks SQL, so a custom suggestion engine was implemented. The suggestion model is not yet complete, but it covers most common use cases.
- Feature: Add support to run multi statement queries (i.e. `USE <catalog>.<schema>; SELECT * FROM <table>`)
- Refactor: Cleanup unused code in backend & upgrade legacy form components in config editor

---
### 1.1.8

- Update grafana-plugin-sdk-go to v0.176.0
- Migrate to @grafana/create-plugin
Expand Down
26 changes: 21 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -54,11 +54,13 @@ To configure the plugin use the values provided under JDBC/ODBC in the advanced

Available configuration fields are as follows:

| Name | Description |
|-----------------|-----------------------------------------------------------------------------------------|
| Server Hostname | Databricks Server Hostname (without http). i.e. `XXX.cloud.databricks.com` |
| HTTP Path | HTTP Path value for the existing cluster or SQL warehouse. i.e. `sql/1.0/endpoints/XXX` |
| Access Token | Personal Access Token for Databricks. |
| Name | Description |
|----------------------|--------------------------------------------------------------------------------------------------------------|
| Server Hostname | Databricks Server Hostname (without http). i.e. `XXX.cloud.databricks.com` |
| Server Port | Databricks Server Port (default `443`) |
| HTTP Path | HTTP Path value for the existing cluster or SQL warehouse. i.e. `sql/1.0/endpoints/XXX` |
| Access Token | Personal Access Token for Databricks. |
| Code Auto Completion | If enabled the SQL editor will fetch catalogs/schemas/tables/columns from Databricks to provide suggestions. |

### Supported Macros

Expand All @@ -83,6 +85,20 @@ By default, the plugin will return the results in wide format. This behavior can

![img.png](img/advanced_options.png)

#### Code Auto Completion

Auto Completion for the code editor is still in development. Basic functionality is implemented,
but might not always work perfectly. When enabled, the editor will make requests to Databricks
while typing to get the available catalogs, schemas, tables and columns. Only the tables present
in the current query will be fetched.
Additionally, the editor will also make suggestions for
Databricks SQL functions & keywords and Grafana macros.

The feature can be enabled in the Datasource Settings.

<img alt="img.png" src="img/autocomplete-02.png" width="52%"/>
<img alt="img.png" src="img/autocomplete-01.png" width="40%"/>

### Examples
#### Single Value Time Series

Expand Down
Binary file added img/autocomplete-01.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added img/autocomplete-02.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified img/config_editor.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"name": "mullerpeter-databricks-datasource",
"private": true,
"version": "1.1.8",
"version": "1.2.0",
"description": "Databricks SQL Connector",
"scripts": {
"build": "webpack -c ./.config/webpack/webpack.config.ts --env production",
Expand Down
2 changes: 1 addition & 1 deletion pkg/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ func main() {
// to exit by itself using os.Exit. Manage automatically manages life cycle
// of datasource instances. It accepts datasource instance factory as first
// argument. This factory will be automatically called on incoming request
// from Grafana to create different instances of SampleDatasource (per datasource
// from Grafana to create different instances of Datasource (per datasource
// ID). When datasource configuration changed Dispose method will be called and
// new datasource instance created using NewSampleDatasource factory.
if err := datasource.Manage("databricks-community", plugin.NewSampleDatasource, datasource.ManageOpts{}); err != nil {
Expand Down
226 changes: 226 additions & 0 deletions pkg/plugin/helper.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,226 @@
package plugin

import (
	"database/sql"
	"encoding/json"
	"fmt"
	"strings"

	_ "github.com/databricks/databricks-sql-go"
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
)

type schemaRequestBody struct {
Catalog string `json:"catalog"`
Schema string `json:"schema"`
Table string `json:"table"`
}

type columnsResponseBody struct {
ColumnName string `json:"name"`
ColumnType string `json:"type"`
}

type defaultsResponseBody struct {
DefaultCatalog string `json:"defaultCatalog"`
DefaultSchema string `json:"defaultSchema"`
}

// autocompletionQueries serves the code-editor autocompletion resource
// endpoints. Depending on req.Path it lists catalogs, schemas, tables or
// columns (scoped by the optional catalog/schema/table in the request
// body) or reports the session defaults, and sends the result to sender
// as a JSON-encoded 200 response. Unknown paths receive a 404.
//
// SECURITY: catalog/schema/table names arrive from the frontend request
// body and are embedded in SQL text (SHOW/DESCRIBE statements cannot be
// parameterized), so every identifier is backtick-quoted via
// quoteIdentifier/quoteQualifiedName to prevent SQL injection.
// NOTE(review): this assumes the frontend sends bare (unquoted) names —
// confirm against the editor code before release.
func autocompletionQueries(req *backend.CallResourceRequest, sender backend.CallResourceResponseSender, db *sql.DB) error {
	path := req.Path
	log.DefaultLogger.Info("CallResource called", "path", path)

	var body schemaRequestBody
	if err := json.Unmarshal(req.Body, &body); err != nil {
		log.DefaultLogger.Error("CallResource Error", "err", err)
		return err
	}

	switch path {
	case "catalogs":
		catalogs, err := queryStringColumn(db, "SHOW CATALOGS")
		if err != nil {
			log.DefaultLogger.Error("CallResource Error", "err", err)
			return err
		}
		return sendJSON(sender, catalogs)

	case "schemas":
		queryString := "SHOW SCHEMAS"
		if body.Catalog != "" {
			queryString = fmt.Sprintf("SHOW SCHEMAS IN %s", quoteIdentifier(body.Catalog))
		}
		log.DefaultLogger.Info("CallResource called", "queryString", queryString)
		schemas, err := queryStringColumn(db, queryString)
		if err != nil {
			log.DefaultLogger.Error("CallResource Error", "err", err)
			return err
		}
		return sendJSON(sender, schemas)

	case "tables":
		queryString := "SHOW TABLES"
		if body.Schema != "" {
			// Qualify with the catalog only when one was supplied.
			if body.Catalog != "" {
				queryString = fmt.Sprintf("SHOW TABLES IN %s.%s", quoteIdentifier(body.Catalog), quoteIdentifier(body.Schema))
			} else {
				queryString = fmt.Sprintf("SHOW TABLES IN %s", quoteIdentifier(body.Schema))
			}
		}
		log.DefaultLogger.Info("CallResource called", "queryString", queryString)
		tables, err := queryTableNames(db, queryString)
		if err != nil {
			log.DefaultLogger.Error("CallResource Error", "err", err)
			return err
		}
		return sendJSON(sender, tables)

	case "columns":
		// body.Table may be a qualified name (catalog.schema.table), so
		// each dot-separated part is quoted individually.
		queryString := fmt.Sprintf("DESCRIBE TABLE %s", quoteQualifiedName(body.Table))
		log.DefaultLogger.Info("CallResource called", "queryString", queryString)
		columns, err := queryColumns(db, queryString)
		if err != nil {
			log.DefaultLogger.Error("CallResource Error", "err", err)
			return err
		}
		return sendJSON(sender, columns)

	case "defaults":
		queryString := "SELECT current_catalog(), current_schema();"
		log.DefaultLogger.Info("CallResource called", "queryString", queryString)

		var currentCatalog, currentSchema sql.NullString
		if err := db.QueryRow(queryString).Scan(&currentCatalog, &currentSchema); err != nil {
			log.DefaultLogger.Error("CallResource Error", "err", err)
			return err
		}
		return sendJSON(sender, defaultsResponseBody{
			DefaultCatalog: currentCatalog.String,
			DefaultSchema:  currentSchema.String,
		})

	default:
		log.DefaultLogger.Error("CallResource Error", "err", "Unknown URL")
		return sender.Send(&backend.CallResourceResponse{
			Status: 404,
			Body:   []byte("Unknown URL"),
		})
	}
}

// quoteIdentifier wraps a single identifier in backticks, doubling any
// embedded backticks, so a user-supplied name cannot break out of the
// identifier position in the generated SQL.
func quoteIdentifier(name string) string {
	return "`" + strings.ReplaceAll(name, "`", "``") + "`"
}

// quoteQualifiedName quotes each dot-separated part of a possibly
// qualified name, e.g. cat.sch.tbl -> `cat`.`sch`.`tbl`.
func quoteQualifiedName(name string) string {
	parts := strings.Split(name, ".")
	for i, part := range parts {
		parts[i] = quoteIdentifier(part)
	}
	return strings.Join(parts, ".")
}

// queryStringColumn runs a query whose result set has a single string
// column (SHOW CATALOGS / SHOW SCHEMAS) and returns all values in order.
func queryStringColumn(db *sql.DB, queryString string) ([]string, error) {
	rows, err := db.Query(queryString)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	// Non-nil empty slice so an empty result marshals as [] rather than null.
	values := make([]string, 0)
	for rows.Next() {
		var value string
		if err := rows.Scan(&value); err != nil {
			return nil, err
		}
		values = append(values, value)
	}
	return values, rows.Err()
}

// queryTableNames runs a SHOW TABLES query (database, tableName,
// isTemporary columns) and returns just the table names.
func queryTableNames(db *sql.DB, queryString string) ([]string, error) {
	rows, err := db.Query(queryString)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	tables := make([]string, 0)
	for rows.Next() {
		var database, tableName string
		var isTemporary bool
		if err := rows.Scan(&database, &tableName, &isTemporary); err != nil {
			return nil, err
		}
		tables = append(tables, tableName)
	}
	return tables, rows.Err()
}

// queryColumns runs a DESCRIBE TABLE query and returns the column
// name/type pairs. Values are scanned via sql.NullString to tolerate
// NULLs in the DESCRIBE output; the comment column is read but dropped.
func queryColumns(db *sql.DB, queryString string) ([]columnsResponseBody, error) {
	rows, err := db.Query(queryString)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	columns := make([]columnsResponseBody, 0)
	for rows.Next() {
		var colName, colType, comment sql.NullString
		if err := rows.Scan(&colName, &colType, &comment); err != nil {
			return nil, err
		}
		columns = append(columns, columnsResponseBody{
			ColumnName: colName.String,
			ColumnType: colType.String,
		})
	}
	return columns, rows.Err()
}

// sendJSON marshals payload and sends it to the caller as an HTTP 200
// resource response.
func sendJSON(sender backend.CallResourceResponseSender, payload interface{}) error {
	jsonBody, err := json.Marshal(payload)
	if err != nil {
		log.DefaultLogger.Error("CallResource Error", "err", err)
		return err
	}
	return sender.Send(&backend.CallResourceResponse{
		Status: 200,
		Body:   jsonBody,
	})
}
Loading

0 comments on commit f9d6a1b

Please sign in to comment.