diff --git a/deploy-manage/cloud-organization/billing/manage-subscription.md b/deploy-manage/cloud-organization/billing/manage-subscription.md index 4a6b13d9f6..f67cc0e96b 100644 --- a/deploy-manage/cloud-organization/billing/manage-subscription.md +++ b/deploy-manage/cloud-organization/billing/manage-subscription.md @@ -67,7 +67,7 @@ You can [change your subscription level](/deploy-manage/cloud-organization/billi : Edit your deployment index management policies to disable the frozen tier that is using [searchable snapshots](/deploy-manage/tools/snapshot-and-restore/searchable-snapshots.md), or set up your cold tier to not mount indices from a searchable snapshot. `JDBC/ODBC clients` -: Make sure that there are no applications that use the SQL [JDBC](/explore-analyze/query-filter/languages/sql-jdbc.md) or [ODBC](/explore-analyze/query-filter/languages/sql-odbc.md) clients. +: Make sure that there are no applications that use the SQL [JDBC](elasticsearch://reference/query-languages/sql/sql-jdbc.md) or [ODBC](elasticsearch://reference/query-languages/sql/sql-odbc.md) clients. `Field-level or document-level security` : Remove any user role configurations based on field or document access [through the API](/deploy-manage/users-roles/cluster-or-deployment-auth/controlling-access-at-document-field-level.md) or the {{kib}} [Roles](/deploy-manage/users-roles/cluster-or-deployment-auth/defining-roles.md) page. 
diff --git a/explore-analyze/images/elasticsearch-reference-apps_excel_cred.png b/explore-analyze/images/elasticsearch-reference-apps_excel_cred.png deleted file mode 100644 index a3da36dbf6..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_excel_cred.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_excel_dsn.png b/explore-analyze/images/elasticsearch-reference-apps_excel_dsn.png deleted file mode 100644 index 7e81cc01f1..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_excel_dsn.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_excel_fromodbc.png b/explore-analyze/images/elasticsearch-reference-apps_excel_fromodbc.png deleted file mode 100644 index 603af4dfc7..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_excel_fromodbc.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_excel_loaded.png b/explore-analyze/images/elasticsearch-reference-apps_excel_loaded.png deleted file mode 100644 index 7d7ea86c8c..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_excel_loaded.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_excel_picktable.png b/explore-analyze/images/elasticsearch-reference-apps_excel_picktable.png deleted file mode 100644 index fd7aecc412..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_excel_picktable.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_microstrat_databases.png b/explore-analyze/images/elasticsearch-reference-apps_microstrat_databases.png deleted file mode 100644 index 9f1c69b796..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_microstrat_databases.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_microstrat_dsn.png 
b/explore-analyze/images/elasticsearch-reference-apps_microstrat_dsn.png deleted file mode 100644 index 4fa4c90947..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_microstrat_dsn.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_microstrat_live.png b/explore-analyze/images/elasticsearch-reference-apps_microstrat_live.png deleted file mode 100644 index 2a3e0fa02a..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_microstrat_live.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_microstrat_loadtable.png b/explore-analyze/images/elasticsearch-reference-apps_microstrat_loadtable.png deleted file mode 100644 index a1502c4e9f..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_microstrat_loadtable.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_microstrat_newdata.png b/explore-analyze/images/elasticsearch-reference-apps_microstrat_newdata.png deleted file mode 100644 index 3a00c6dffe..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_microstrat_newdata.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_microstrat_newdossier.png b/explore-analyze/images/elasticsearch-reference-apps_microstrat_newdossier.png deleted file mode 100644 index 275588a7fe..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_microstrat_newdossier.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_microstrat_newds.png b/explore-analyze/images/elasticsearch-reference-apps_microstrat_newds.png deleted file mode 100644 index 45e3666eae..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_microstrat_newds.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_microstrat_tables.png 
b/explore-analyze/images/elasticsearch-reference-apps_microstrat_tables.png deleted file mode 100644 index 71283d05e5..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_microstrat_tables.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_microstrat_visualize.png b/explore-analyze/images/elasticsearch-reference-apps_microstrat_visualize.png deleted file mode 100644 index 3e15946f0f..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_microstrat_visualize.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_pbi_dsn.png b/explore-analyze/images/elasticsearch-reference-apps_pbi_dsn.png deleted file mode 100644 index 9e9512ec40..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_pbi_dsn.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_pbi_fromodbc1.png b/explore-analyze/images/elasticsearch-reference-apps_pbi_fromodbc1.png deleted file mode 100644 index 313b1edbc7..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_pbi_fromodbc1.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_pbi_fromodbc2.png b/explore-analyze/images/elasticsearch-reference-apps_pbi_fromodbc2.png deleted file mode 100644 index fade98f4ad..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_pbi_fromodbc2.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_pbi_loaded.png b/explore-analyze/images/elasticsearch-reference-apps_pbi_loaded.png deleted file mode 100644 index c1927d2200..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_pbi_loaded.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_pbi_picktable.png b/explore-analyze/images/elasticsearch-reference-apps_pbi_picktable.png deleted file mode 100644 index 
2b2e1c8e4e..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_pbi_picktable.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_ps_exed.png b/explore-analyze/images/elasticsearch-reference-apps_ps_exed.png deleted file mode 100644 index 84c3c12ec4..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_ps_exed.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_qlik_adddata.png b/explore-analyze/images/elasticsearch-reference-apps_qlik_adddata.png deleted file mode 100644 index b32596c1c0..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_qlik_adddata.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_qlik_create.png b/explore-analyze/images/elasticsearch-reference-apps_qlik_create.png deleted file mode 100644 index 4a2438c1cf..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_qlik_create.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_qlik_dsn.png b/explore-analyze/images/elasticsearch-reference-apps_qlik_dsn.png deleted file mode 100644 index 79852e5016..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_qlik_dsn.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_qlik_newapp.png b/explore-analyze/images/elasticsearch-reference-apps_qlik_newapp.png deleted file mode 100644 index 1909707825..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_qlik_newapp.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_qlik_odbc.png b/explore-analyze/images/elasticsearch-reference-apps_qlik_odbc.png deleted file mode 100644 index 9b56fe6bcb..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_qlik_odbc.png and /dev/null differ diff --git 
a/explore-analyze/images/elasticsearch-reference-apps_qlik_open.png b/explore-analyze/images/elasticsearch-reference-apps_qlik_open.png deleted file mode 100644 index f4e33230ec..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_qlik_open.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_qlik_selecttable.png b/explore-analyze/images/elasticsearch-reference-apps_qlik_selecttable.png deleted file mode 100644 index c6a485cb85..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_qlik_selecttable.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_qlik_visualize.png b/explore-analyze/images/elasticsearch-reference-apps_qlik_visualize.png deleted file mode 100644 index c87cd505de..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_qlik_visualize.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_connect.png b/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_connect.png deleted file mode 100644 index db0840220b..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_connect.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_from_connector.png b/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_from_connector.png deleted file mode 100644 index 5095305323..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_from_connector.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_prepare.png b/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_prepare.png deleted file mode 100644 index 2e0bea7090..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_prepare.png and /dev/null differ diff 
--git a/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_report.png b/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_report.png deleted file mode 100644 index 51646ca353..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_report.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_tableau_server_connect.png b/explore-analyze/images/elasticsearch-reference-apps_tableau_server_connect.png deleted file mode 100644 index b2f2b5703b..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_tableau_server_connect.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_tableau_server_from_connector.png b/explore-analyze/images/elasticsearch-reference-apps_tableau_server_from_connector.png deleted file mode 100644 index f832d63697..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_tableau_server_from_connector.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_tableau_server_prepare.png b/explore-analyze/images/elasticsearch-reference-apps_tableau_server_prepare.png deleted file mode 100644 index 3b1b3a579d..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_tableau_server_prepare.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-apps_tableau_server_report.png b/explore-analyze/images/elasticsearch-reference-apps_tableau_server_report.png deleted file mode 100644 index 5b7418d8dd..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-apps_tableau_server_report.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbeaver-1-new-conn.png b/explore-analyze/images/elasticsearch-reference-dbeaver-1-new-conn.png deleted file mode 100644 index bf7f1c6313..0000000000 Binary files 
a/explore-analyze/images/elasticsearch-reference-dbeaver-1-new-conn.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbeaver-2-conn-es.png b/explore-analyze/images/elasticsearch-reference-dbeaver-2-conn-es.png deleted file mode 100644 index f63df0987c..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbeaver-2-conn-es.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbeaver-3-conn-props.png b/explore-analyze/images/elasticsearch-reference-dbeaver-3-conn-props.png deleted file mode 100644 index 825ce1b635..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbeaver-3-conn-props.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbeaver-4-driver-ver.png b/explore-analyze/images/elasticsearch-reference-dbeaver-4-driver-ver.png deleted file mode 100644 index bcad2a75d8..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbeaver-4-driver-ver.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbeaver-5-test-conn.png b/explore-analyze/images/elasticsearch-reference-dbeaver-5-test-conn.png deleted file mode 100644 index c76ae19937..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbeaver-5-test-conn.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbeaver-6-data.png b/explore-analyze/images/elasticsearch-reference-dbeaver-6-data.png deleted file mode 100644 index 053042b791..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbeaver-6-data.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbvis_add_connection.png b/explore-analyze/images/elasticsearch-reference-dbvis_add_connection.png deleted file mode 100644 index 3a6ec7ac38..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbvis_add_connection.png and /dev/null differ diff --git 
a/explore-analyze/images/elasticsearch-reference-dbvis_add_db_connection.png b/explore-analyze/images/elasticsearch-reference-dbvis_add_db_connection.png deleted file mode 100644 index 5c82231b54..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbvis_add_db_connection.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbvis_connection_details.png b/explore-analyze/images/elasticsearch-reference-dbvis_connection_details.png deleted file mode 100644 index 8201abd548..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbvis_connection_details.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbvis_hero.png b/explore-analyze/images/elasticsearch-reference-dbvis_hero.png deleted file mode 100644 index 512078d773..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbvis_hero.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_done.png b/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_done.png deleted file mode 100644 index d69c15bf43..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_done.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_refresh.png b/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_refresh.png deleted file mode 100644 index 96b57b430e..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_refresh.png and /dev/null differ diff --git a/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_start.png b/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_start.png deleted file mode 100644 index 68df4b00fc..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_start.png and /dev/null differ diff --git 
a/explore-analyze/images/elasticsearch-reference-dbvis_open_driver_manager.png b/explore-analyze/images/elasticsearch-reference-dbvis_open_driver_manager.png deleted file mode 100644 index ec1078658a..0000000000 Binary files a/explore-analyze/images/elasticsearch-reference-dbvis_open_driver_manager.png and /dev/null differ diff --git a/explore-analyze/query-filter/languages/sql-async.md b/explore-analyze/query-filter/languages/sql-async.md deleted file mode 100644 index 26016ce0a5..0000000000 --- a/explore-analyze/query-filter/languages/sql-async.md +++ /dev/null @@ -1,125 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-async.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Run an async SQL search [sql-async] - -By default, SQL searches are synchronous. They wait for complete results before returning a response. However, results can take longer for searches across large data sets or [frozen data](../../../manage-data/lifecycle/data-tiers.md). - -To avoid long waits, run an async SQL search. Set `wait_for_completion_timeout` to a duration you’d like to wait for synchronous results. - -```console -POST _sql?format=json -{ - "wait_for_completion_timeout": "2s", - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 5 -} -``` - -If the search doesn’t finish within this period, the search becomes async. The API returns: - -* An `id` for the search. -* An `is_partial` value of `true`, indicating the search results are incomplete. -* An `is_running` value of `true`, indicating the search is still running in the background. - -For CSV, TSV, and TXT responses, the API returns these values in the respective `Async-ID`, `Async-partial`, and `Async-running` HTTP headers instead. 
- -```console-result -{ - "id": "FnR0TDhyWUVmUmVtWXRWZER4MXZiNFEad2F5UDk2ZVdTVHV1S0xDUy00SklUdzozMTU=", - "is_partial": true, - "is_running": true, - "rows": [ ] -} -``` - -To check the progress of an async search, use the search ID with the [get async SQL search status API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-get-async-status). - -```console -GET _sql/async/status/FnR0TDhyWUVmUmVtWXRWZER4MXZiNFEad2F5UDk2ZVdTVHV1S0xDUy00SklUdzozMTU= -``` - -If `is_running` and `is_partial` are `false`, the async search has finished with complete results. - -```console-result -{ - "id": "FnR0TDhyWUVmUmVtWXRWZER4MXZiNFEad2F5UDk2ZVdTVHV1S0xDUy00SklUdzozMTU=", - "is_running": false, - "is_partial": false, - "expiration_time_in_millis": 1611690295000, - "completion_status": 200 -} -``` - -To get the results, use the search ID with the [get async SQL search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-get-async). If the search is still running, specify how long you’d like to wait using `wait_for_completion_timeout`. You can also specify the response `format`. - -```console -GET _sql/async/FnR0TDhyWUVmUmVtWXRWZER4MXZiNFEad2F5UDk2ZVdTVHV1S0xDUy00SklUdzozMTU=?wait_for_completion_timeout=2s&format=json -``` - - -## Change the search retention period [sql-async-retention] - -By default, {{es}} stores async SQL searches for five days. After this period, {{es}} deletes the search and its results, even if the search is still running. To change this retention period, use the `keep_alive` parameter. - -```console -POST _sql?format=json -{ - "keep_alive": "2d", - "wait_for_completion_timeout": "2s", - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 5 -} -``` - -You can use the get async SQL search API’s `keep_alive` parameter to later change the retention period. The new period starts after the request runs. 
- -```console -GET _sql/async/FmdMX2pIang3UWhLRU5QS0lqdlppYncaMUpYQ05oSkpTc3kwZ21EdC1tbFJXQToxOTI=?keep_alive=5d&wait_for_completion_timeout=2s&format=json -``` - -Use the [delete async SQL search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-delete-async) to delete an async search before the `keep_alive` period ends. If the search is still running, {{es}} cancels it. - -```console -DELETE _sql/async/delete/FmdMX2pIang3UWhLRU5QS0lqdlppYncaMUpYQ05oSkpTc3kwZ21EdC1tbFJXQToxOTI= -``` - - -## Store synchronous SQL searches [sql-store-searches] - -By default, {{es}} only stores async SQL searches. To save a synchronous search, specify `wait_for_completion_timeout` and set `keep_on_completion` to `true`. - -```console -POST _sql?format=json -{ - "keep_on_completion": true, - "wait_for_completion_timeout": "2s", - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 5 -} -``` - -If `is_partial` and `is_running` are `false`, the search was synchronous and returned complete results. - -```console-result -{ - "id": "Fnc5UllQdUVWU0NxRFNMbWxNYXplaFEaMUpYQ05oSkpTc3kwZ21EdC1tbFJXQTo0NzA=", - "is_partial": false, - "is_running": false, - "rows": ..., - "columns": ..., - "cursor": ... -} -``` - -You can get the same results later using the search ID with the [get async SQL search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-get-async). - -Saved synchronous searches are still subject to the `keep_alive` retention period. When this period ends, {{es}} deletes the search results. You can also delete saved searches using the [delete async SQL search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-delete-async). 
- diff --git a/explore-analyze/query-filter/languages/sql-cli.md b/explore-analyze/query-filter/languages/sql-cli.md deleted file mode 100644 index b511233551..0000000000 --- a/explore-analyze/query-filter/languages/sql-cli.md +++ /dev/null @@ -1,153 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-cli.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# SQL CLI [sql-cli] - -Elasticsearch ships with a script to run the SQL CLI in its `bin` directory: - -```bash -$ ./bin/elasticsearch-sql-cli -``` - -You can pass the URL of the Elasticsearch instance to connect to as the first parameter: - -```bash -$ ./bin/elasticsearch-sql-cli https://some.server:9200 -``` - -If security is enabled on your cluster, you can pass the username and password in the form `username:password@host_name:port` to the SQL CLI: - -```bash -$ ./bin/elasticsearch-sql-cli https://sql_user:strongpassword@some.server:9200 -``` - -Once the CLI is running you can use any [query](elasticsearch://reference/query-languages/sql/sql-spec.md) that Elasticsearch supports: - -```sql -sql> SELECT * FROM library WHERE page_count > 500 ORDER BY page_count DESC; - author | name | page_count | release_date ------------------+--------------------+---------------+--------------- -Peter F. Hamilton|Pandora's Star |768 |1078185600000 -Vernor Vinge |A Fire Upon the Deep|613 |707356800000 -Frank Herbert |Dune |604 |-144720000000 -Alastair Reynolds|Revelation Space |585 |953078400000 -James S.A. Corey |Leviathan Wakes |561 |1306972800000 -``` - -The jar containing the SQL CLI is a stand alone Java application and the scripts just launch it. You can move it around to other machines without having to install Elasticsearch on them. 
Without the already provided script files, you can use a command similar to the following to start the SQL CLI: - -```bash -$ ./java -jar [PATH_TO_CLI_JAR]/elasticsearch-sql-cli-[VERSION].jar https://some.server:9200 -``` - -or - -```bash -$ ./java -cp [PATH_TO_CLI_JAR]/elasticsearch-sql-cli-[VERSION].jar org.elasticsearch.xpack.sql.cli.Cli https://some.server:9200 -``` - -The jar name will be different for each Elasticsearch version (for example `elasticsearch-sql-cli-7.3.2.jar`), thus the generic `VERSION` specified in the example above. Furthermore, if not running the command from the folder where the SQL CLI jar resides, you’d have to provide the full path, as well. - - -## CLI commands [cli-commands] - -Apart from SQL queries, CLI can also execute some specific commands: - -`allow_partial_search_results = ` (default `false`) -: If `true`, returns partial results if there are shard request timeouts or [shard failures](../../../deploy-manage/distributed-architecture/reading-and-writing-documents.md#shard-failures). If `false`, returns an error with no partial results. - -```sql -sql> allow_partial_search_results = true; -allow_partial_search_results set to true -``` - -`fetch_size = ` (default `1000`) -: Allows to change the size of fetches for query execution. Each fetch is delimited by fetch separator (if explicitly set). - -```sql -sql> fetch_size = 2000; -fetch size set to 2000 -``` - -`fetch_separator = ` (empty string by default) -: Allows to change the separator string between fetches. - -```sql -sql> fetch_separator = "---------------------"; -fetch separator set to "---------------------" -``` - -`lenient = ` (default `false`) -: If `false`, Elasticsearch SQL returns an error for fields containing [array values](elasticsearch://reference/elasticsearch/mapping-reference/array.md). If `true`, Elasticsearch SQL returns the first value from the array with no guarantee of consistent results. 
- -```sql -sql> lenient = true; -lenient set to true -``` - -`info` -: Returns server information. - -```sql -sql> info; -Node:mynode Cluster:elasticsearch Version:8.3 -``` - -`exit` -: Closes the CLI. - -```sql -sql> exit; -Bye! -``` - -`cls` -: Clears the screen. - -```sql -sql> cls; -``` - -`logo` -: Prints Elastic logo. - -```sql -sql> logo; - - asticElasticE - ElasticE sticEla - sticEl ticEl Elast - lasti Elasti tic - cEl ast icE - icE as cEl - icE as cEl - icEla las El - sticElasticElast icElas - las last ticElast -El asti asti stic -El asticEla Elas icE -El Elas cElasticE ticEl cE -Ela ticEl ticElasti cE - las astic last icE - sticElas asti stic - icEl sticElasticElast - icE sticE ticEla - icE sti cEla - icEl sti Ela - cEl sti cEl - Ela astic ticE - asti ElasticElasti - ticElasti lasticElas - ElasticElast - - SQL - 8.3.0 -``` - diff --git a/explore-analyze/query-filter/languages/sql-client-apps-dbeaver.md b/explore-analyze/query-filter/languages/sql-client-apps-dbeaver.md deleted file mode 100644 index f6c7325e4b..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-dbeaver.md +++ /dev/null @@ -1,75 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-dbeaver.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# DBeaver [sql-client-apps-dbeaver] - -You can use the {{es}} JDBC driver to access {{es}} data from DBeaver. - -::::{important} -Elastic does not endorse, promote or provide support for this application; for native Elasticsearch integration in this product, reach out to its vendor. -:::: - - -## Prerequisites [_prerequisites_2] - -* [DBeaver](https://dbeaver.io/) version 6.0.0 or higher -* Elasticsearch SQL [JDBC driver](sql-jdbc.md) - - -## New Connection [_new_connection] - -Create a new connection either through the menu **File** > **New** > **Database Connection** menu or directly through the **Database Connection** panel. 
- -![dbeaver 1 new conn](/explore-analyze/images/elasticsearch-reference-dbeaver-1-new-conn.png "") - - -## Select {{es}} type [_select_es_type] - -Select the {{es}} type from the available connection types: - -![dbeaver 2 conn es](/explore-analyze/images/elasticsearch-reference-dbeaver-2-conn-es.png "") - - -## Specify the {{es}} cluster information [_specify_the_es_cluster_information] - -Configure the Elasticsearch SQL connection appropriately: - -![dbeaver 3 conn props](/explore-analyze/images/elasticsearch-reference-dbeaver-3-conn-props.png "") - - -## Verify the driver version [_verify_the_driver_version] - -Make sure the correct JDBC driver version is used by using the **Edit Driver Settings** button: - -![dbeaver 4 driver ver](/explore-analyze/images/elasticsearch-reference-dbeaver-4-driver-ver.png "") - -DBeaver is aware of the {{es}} JDBC maven repository so simply **Download/Update** the artifact or add a new one. As an alternative one can add a local file instead if the {{es}} Maven repository is not an option. - -When changing the driver, make sure to click on the **Find Class** button at the bottom - the Driver class should be picked out automatically however this provides a sanity check that the driver jar is properly found and it is not corrupt. - - -## Test connectivity [_test_connectivity] - -Once the driver version and the settings are in place, use **Test Connection** to check that everything works. If things are okay, one should get a confirmation window with the version of the driver and that of Elasticsearch SQL: - -![dbeaver 5 test conn](/explore-analyze/images/elasticsearch-reference-dbeaver-5-test-conn.png "") - -Click **Finish** and the new {{es}} connection appears in the **Database Connection** panel. - -DBeaver is now configured to talk to {{es}}. 
- - -## Connect to {{es}} [_connect_to_es] - -Simply click on the {{es}} connection and start querying and exploring {{es}}: - -![dbeaver 6 data](/explore-analyze/images/elasticsearch-reference-dbeaver-6-data.png "") - - diff --git a/explore-analyze/query-filter/languages/sql-client-apps-dbvis.md b/explore-analyze/query-filter/languages/sql-client-apps-dbvis.md deleted file mode 100644 index dc1a63f8f5..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-dbvis.md +++ /dev/null @@ -1,64 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-dbvis.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# DbVisualizer [sql-client-apps-dbvis] - -You can use the {{es}} JDBC driver to access {{es}} data from DbVisualizer. - -::::{important} -Elastic does not endorse, promote or provide support for this application. -:::: - -## Prerequisites [_prerequisites_3] - -* [DbVisualizer](https://www.dbvis.com/) 13.0 or higher -* Elasticsearch SQL [JDBC driver](sql-jdbc.md) - - Note - : Pre 13.0 versions of DbVisualizer can still connect to {{es}} by having the [JDBC driver](sql-jdbc.md) set up from the generic **Custom** template. 
- -## Setup the {{es}} JDBC driver [_setup_the_es_jdbc_driver] - -Setup the {{es}} JDBC driver through **Tools** > **Driver Manager**: - -![dbvis driver manager](/explore-analyze/images/elasticsearch-reference-dbvis_open_driver_manager.png "") - -Select **Elasticsearch** driver template from the left sidebar to create a new user driver: - -![dbvis driver manager elasticsearch](/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_done.png "") - -Download the driver locally: - -![dbvis driver manager download](/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_start.png "") - -and check its availability status: - -![dbvis driver manager ready](/explore-analyze/images/elasticsearch-reference-dbvis_new_driver_refresh.png "") - - -## Create a new connection [_create_a_new_connection] - -Once the {{es}} driver is in place, create a new connection: - -![dbvis new connection](/explore-analyze/images/elasticsearch-reference-dbvis_add_db_connection.png "") - -by double-clicking the {{es}} entry in the list of available drivers: - -![dbvis new elasticsearch connection](/explore-analyze/images/elasticsearch-reference-dbvis_add_connection.png "") - -Enter the connection details, then press **Connect** and the driver version (as that of the cluster) should show up under **Connection Message**. - -![dbvis enter connection details](/explore-analyze/images/elasticsearch-reference-dbvis_connection_details.png "") - -## Execute SQL queries [_execute_sql_queries] - -The setup is done. 
DbVisualizer can be used to run queries against {{es}} and explore its content: - -![dbvis running queries](/explore-analyze/images/elasticsearch-reference-dbvis_hero.png "") \ No newline at end of file diff --git a/explore-analyze/query-filter/languages/sql-client-apps-excel.md b/explore-analyze/query-filter/languages/sql-client-apps-excel.md deleted file mode 100644 index af9bcc14f3..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-excel.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-excel.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Microsoft Excel [sql-client-apps-excel] - -You can use the {{es}} ODBC driver to access {{es}} data from Microsoft Excel. - -::::{important} -Elastic does not endorse, promote or provide support for this application; for native Elasticsearch integration in this product, reach out to its vendor. -:::: - - -## Prerequisites [_prerequisites_4] - -* [Microsoft Office](https://products.office.com/en/excel) 2016 or higher -* Elasticsearch SQL [ODBC driver](sql-odbc.md) -* A preconfigured User or System DSN (see [Configuration](sql-odbc-setup.md#dsn-configuration) section on how to configure a DSN). - - -## Load data into a spreadsheet [_load_data_into_a_spreadsheet] - -First, you’ll need to choose ODBC as the source to load data from. To do so, click on the *Data* tab, then *New Query* button, in the drop-down menu expand *From Other Sources*, then choose *From ODBC*: - -$$$apps_excel_fromodbc$$$ -![apps excel fromodbc](/explore-analyze/images/elasticsearch-reference-apps_excel_fromodbc.png "") - -This will open a new window with a drop down menu populated with the DSNs that Excel found on the system. 
Choose a DSN configured to connect to your {{es}} instance and press the *OK* button: - -$$$apps_excel_dsn$$$ -![apps excel dsn](/explore-analyze/images/elasticsearch-reference-apps_excel_dsn.png "") - -This will lead to a new window, allowing the user to input the connection credentials. - -A username might be required by Excel even if the {{es}} instance has no security enabled. Providing a bogus username with no password in this case will not hinder the connectivity. Note however that Excel will cache these credentials (so in case you do have security enabled, you won’t be prompted for the credentials a second time). - -Fill in the username and the password and press *Connect*. - -$$$apps_excel_cred$$$ -![apps excel cred](/explore-analyze/images/elasticsearch-reference-apps_excel_cred.png "") - -Once connected, Excel will read {{es}}'s catalog and offer the user a choice of tables (indices) to load data from. Clicking on one of the tables will load a preview of the data within: - -$$$apps_excel_picktable$$$ -![apps excel picktable](/explore-analyze/images/elasticsearch-reference-apps_excel_picktable.png "") - -Now click the *Load* button, which will have Excel load all the data from the table into a spreadsheet: - -$$$apps_excel_loaded$$$ -![apps excel loaded](/explore-analyze/images/elasticsearch-reference-apps_excel_loaded.png "") - - diff --git a/explore-analyze/query-filter/languages/sql-client-apps-microstrat.md b/explore-analyze/query-filter/languages/sql-client-apps-microstrat.md deleted file mode 100644 index f7edb85bff..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-microstrat.md +++ /dev/null @@ -1,99 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-microstrat.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# MicroStrategy Desktop [sql-client-apps-microstrat] - -You can use the {{es}} ODBC driver to access {{es}} data from 
MicroStrategy Desktop. - -::::{important} -Elastic does not endorse, promote or provide support for this application; for native Elasticsearch integration in this product, reach out to its vendor. -:::: - - -## Prerequisites [_prerequisites_7] - -* [MicroStrategy Desktop](https://www.microstrategy.com/us/get-started/desktop) 11 or higher -* Elasticsearch SQL [ODBC driver](sql-odbc.md) -* A preconfigured User or System DSN (see [Configuration](sql-odbc-setup.md#dsn-configuration) section on how to configure a DSN). - - -## Data loading [_data_loading_2] - -To use the Elasticsearch SQL ODBC Driver to load data into MicroStrategy Desktop perform the following steps in sequence. - -1. Create a New Dossier - - Once the application is launched, you’ll first need to create a *New Dossier*: - - $$$apps_microstrat_newdossier$$$ - ![apps microstrat newdossier](/explore-analyze/images/elasticsearch-reference-apps_microstrat_newdossier.png "") - -2. New Data - - To import into the *New Dossier* just opened, press on the *New Data* button in the *DATASETS* column: - - $$$apps_microstrat_newdata$$$ - ![apps microstrat newdata](/explore-analyze/images/elasticsearch-reference-apps_microstrat_newdata.png "") - -3. Access data from Tables - - This opens a new window that allows to choose the source to load data from. Click on the *Databases* icon: - - $$$apps_microstrat_databases$$$ - ![apps microstrat databases](/explore-analyze/images/elasticsearch-reference-apps_microstrat_databases.png "") - -4. New Data Source - - In the newly opened *Import from Table - Select* window, click on the **+** button to the right of *DATA SOURCES* item: - - $$$apps_microstrat_newds$$$ - ![apps microstrat newds](/explore-analyze/images/elasticsearch-reference-apps_microstrat_newds.png "") - -5. Data Source - - In the *Data Source* window, tick the radio button for *DSN Data Sources*. In the *DSN* drop-down box, choose the name of the DSN that you have previously configured. 
For the *Version*, chose *Generic DBMS*. - - Input a user name and password in the provided fields. Note that the application requires them irrespective of the fact that they might already be part of the previously configured DSN and the new input will take precedence over those. - - Finally, give a name to the application-specific data source you’re just configuring: - - $$$apps_microstrat_dsn$$$ - ![apps microstrat dsn](/explore-analyze/images/elasticsearch-reference-apps_microstrat_dsn.png "") - -6. Select Import Options - - Choosing an import methodology follows. You can pick any of the options; we’ll exemplify the *Select Tables* option: - - $$$apps_microstrat_tables$$$ - ![apps microstrat tables](/explore-analyze/images/elasticsearch-reference-apps_microstrat_tables.png "") - -7. Import from Table - Select - - The data source you’ve named two steps before is now listed in the *DATA SOURCES* column. Clicking on its name triggers the application to query the {{es}} instance configured in the DSN and list the tables available within: - - $$$apps_microstrat_loadtable$$$ - ![apps microstrat loadtable](/explore-analyze/images/elasticsearch-reference-apps_microstrat_loadtable.png "") - -8. Data Access Mode - - Choose a table to load data from and press the *Finish* button. When doing so, the application offers to choose a loading methodology. You can choose whichever, we’ll exemplify the *Connect Live* way: - - $$$apps_microstrat_live$$$ - ![apps microstrat live](/explore-analyze/images/elasticsearch-reference-apps_microstrat_live.png "") - -9. 
Visualize the data - - From the *DATASETS* column you can choose what table columns (or index fields) to visualize: - - $$$apps_microstrat_visualize$$$ - ![apps microstrat visualize](/explore-analyze/images/elasticsearch-reference-apps_microstrat_visualize.png "") - - - diff --git a/explore-analyze/query-filter/languages/sql-client-apps-powerbi.md b/explore-analyze/query-filter/languages/sql-client-apps-powerbi.md deleted file mode 100644 index 822b6c2cfd..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-powerbi.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-powerbi.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Microsoft Power BI Desktop [sql-client-apps-powerbi] - -You can use the {{es}} ODBC driver to access {{es}} data from Microsoft Power BI Desktop. - -::::{important} -Elastic does not endorse, promote or provide support for this application; for native Elasticsearch integration in this product, reach out to its vendor. -:::: - - -## Prerequisites [_prerequisites_5] - -* [Microsoft Power BI Desktop](https://powerbi.microsoft.com/en-us/desktop/) 2.63 or higher -* Elasticsearch SQL [ODBC driver](sql-odbc.md) -* A preconfigured User or System DSN (see [Configuration](sql-odbc-setup.md#dsn-configuration) section on how to configure a DSN). - - -## Data loading [_data_loading] - -First, you’ll need to choose ODBC as the source to load data from. 
Once launched, click on the *Get Data* button (under *Home* tab), then on the *More…* button at the bottom of the list: - -$$$apps_pbi_fromodbc1$$$ -![apps pbi fromodbc1](/explore-analyze/images/elasticsearch-reference-apps_pbi_fromodbc1.png "") - -In the new opened window scroll at the bottom of the *All* list and select the *ODBC* entry, then click on the *Connect* button: - -$$$apps_pbi_fromodbc2$$$ -![apps pbi fromodbc2](/explore-analyze/images/elasticsearch-reference-apps_pbi_fromodbc2.png "") - -This will replace current window with a new *From ODBC* one, where you’ll have to select a previously configured DSN: - -$$$apps_pbi_dsn$$$ -![apps pbi dsn](/explore-analyze/images/elasticsearch-reference-apps_pbi_dsn.png "") - -Once connected Power BI will read {{es}}'s catalog and offer the user a choice of tables (indices) to load data from. Clicking on one of the tables will load a preview of the data within: - -$$$apps_pbi_picktable$$$ -![apps pbi picktable](/explore-analyze/images/elasticsearch-reference-apps_pbi_picktable.png "") - -Now tick the chosen table and click on the *Load* button. Power BI will now load and analyze the data, populating a list with the available columns. 
These can now be used to build the desired visualisation: - -$$$apps_pbi_loaded$$$ -![apps pbi loaded](/explore-analyze/images/elasticsearch-reference-apps_pbi_loaded.png "") - - diff --git a/explore-analyze/query-filter/languages/sql-client-apps-ps1.md b/explore-analyze/query-filter/languages/sql-client-apps-ps1.md deleted file mode 100644 index e7a3f58613..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-ps1.md +++ /dev/null @@ -1,50 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-ps1.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Microsoft PowerShell [sql-client-apps-ps1] - -You can use the {{es}} ODBC driver to access {{es}} data from Microsoft PowerShell. - -::::{important} -Elastic does not endorse, promote or provide support for this application; for native Elasticsearch integration in this product, reach out to its vendor. -:::: - - -## Prerequisites [_prerequisites_6] - -* [Microsoft PowerShell](https://docs.microsoft.com/en-us/powershell/) -* Elasticsearch SQL [ODBC driver](sql-odbc.md) -* A preconfigured User or System DSN (see [Configuration](sql-odbc-setup.md#dsn-configuration) section on how to configure a DSN). - - -## Writing a script [_writing_a_script] - -While putting the following instructions into a script file is not an absolute requirement, doing so will make it easier to extend and reuse. The following instructions exemplify how to execute a simple SELECT query from an existing index in your {{es}} instance, using a DSN configured in advance. 
Open a new file, `select.ps1`, and place the following instructions in it: - -```powershell -$connectstring = "DSN=Local Elasticsearch;" -$sql = "SELECT * FROM library" - -$conn = New-Object System.Data.Odbc.OdbcConnection($connectstring) -$conn.open() -$cmd = New-Object system.Data.Odbc.OdbcCommand($sql,$conn) -$da = New-Object system.Data.Odbc.OdbcDataAdapter($cmd) -$dt = New-Object system.Data.datatable -$null = $da.fill($dt) -$conn.close() -$dt -``` - -Now open a PowerShell shell and simply execute the script: - -$$$apps_excel_exed$$$ -![apps ps exed](/explore-analyze/images/elasticsearch-reference-apps_ps_exed.png "") - - diff --git a/explore-analyze/query-filter/languages/sql-client-apps-qlik.md b/explore-analyze/query-filter/languages/sql-client-apps-qlik.md deleted file mode 100644 index edff7e7c9f..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-qlik.md +++ /dev/null @@ -1,90 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-qlik.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Qlik Sense Desktop [sql-client-apps-qlik] - -You can use the {{es}} ODBC driver to access {{es}} data from Qlik Sense Desktop. - -::::{important} -Elastic does not endorse, promote or provide support for this application; for native Elasticsearch integration in this product, reach out to its vendor. -:::: - - -## Prerequisites [_prerequisites_8] - -* [Qlik Sense Desktop](https://www.qlik.com/us/try-or-buy/download-qlik-sense) November 2018 or higher -* Elasticsearch SQL [ODBC driver](sql-odbc.md) -* A preconfigured User or System DSN (see [Configuration](sql-odbc-setup.md#dsn-configuration) section on how to configure a DSN). - - -## Data loading [_data_loading_3] - -To use the Elasticsearch SQL ODBC Driver to load data into Qlik Sense Desktop perform the following steps in sequence. - -1. 
Create new app - - Once the application is launched, you’ll first need to click on the *Create new app* button: - - $$$apps_qlik_newapp$$$ - ![apps qlik newapp](/explore-analyze/images/elasticsearch-reference-apps_qlik_newapp.png "") - -2. Name app - - …then give it a name, - - $$$apps_qlik_create$$$ - ![apps qlik create](/explore-analyze/images/elasticsearch-reference-apps_qlik_create.png "") - -3. Open app - - …and then open it: - - $$$apps_qlik_open$$$ - ![apps qlik open](/explore-analyze/images/elasticsearch-reference-apps_qlik_open.png "") - -4. Add data to your app - - Start configuring the source to load data from in the newly created app: - - $$$apps_qlik_adddata$$$ - ![apps qlik adddata](/explore-analyze/images/elasticsearch-reference-apps_qlik_adddata.png "") - -5. Load from ODBC - - You’ll be given a choice of sources to select. Click on the *ODBC* icon: - - $$$apps_qlik_odbc$$$ - ![apps qlik odbc](/explore-analyze/images/elasticsearch-reference-apps_qlik_odbc.png "") - -6. Choose DSN - - In the *Create new connection (ODBC)* dialog, click on the DSN name that you have previously configured for your {{es}} instance: - - $$$apps_qlik_dsn$$$ - ![apps qlik dsn](/explore-analyze/images/elasticsearch-reference-apps_qlik_dsn.png "") - - Provide a username and password in the respective fields, if authentication is enabled on your instance and if these are not already part of the DSN. Press the *Create* button. - -7. Select source table - - The application will now connect to the {{es}} instance and query the catalog information, presenting you with a list of tables that you can load data from: - - $$$apps_qlik_selecttable$$$ - ![apps qlik selecttable](/explore-analyze/images/elasticsearch-reference-apps_qlik_selecttable.png "") - -8. 
Visualize the data - - Press on the *Add data* button and customize your data visualization: - - $$$apps_qlik_visualize$$$ - ![apps qlik visualize](/explore-analyze/images/elasticsearch-reference-apps_qlik_visualize.png "") - - - diff --git a/explore-analyze/query-filter/languages/sql-client-apps-squirrel.md b/explore-analyze/query-filter/languages/sql-client-apps-squirrel.md deleted file mode 100644 index e792bedce0..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-squirrel.md +++ /dev/null @@ -1,64 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-squirrel.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# SQuirreL SQL [sql-client-apps-squirrel] - -You can use the {{es}} JDBC driver to access {{es}} data from SQuirreL SQL. - -::::{important} -Elastic does not endorse, promote or provide support for this application; for native Elasticsearch integration in this product, reach out to its vendor. -:::: - - -## Prerequisites [_prerequisites_9] - -* [SQuirreL SQL](http://squirrel-sql.sourceforge.net/) version 4.0.0 or higher -* Elasticsearch SQL [JDBC driver](sql-jdbc.md) - - -## Add {{es}} JDBC Driver [_add_es_jdbc_driver] - -To add the {{es}} JDBC driver, use **Windows** > **View Drivers** menu (or Ctrl+Shift+D shortcut): - -![squirell 1 view drivers](/explore-analyze/images/elasticsearch-reference-squirell-1-view-drivers.png "") - -Select **Elasticsearch** profile from the `Drivers` panel on the left-hand side (if it is missing check the SQuirreL SQL version or add a new entry to the list through the + button in the upper left corner): - -![squirell 2 select driver](/explore-analyze/images/elasticsearch-reference-squirell-2-select-driver.png "") - -Select the **Extra Class Path** tab and **Add** the JDBC jar. 
Name the connection and **List Drivers** to have `Class Name` populated if it is not already filled-in : - -![squirell 3 add driver](/explore-analyze/images/elasticsearch-reference-squirell-3-add-driver.png "") - -The driver should now appear in the list with a blue check mark next to its name: - -![squirell 4 driver list](/explore-analyze/images/elasticsearch-reference-squirell-4-driver-list.png "") - - -## Add an alias for {{es}} [_add_an_alias_for_es] - -Add a new connection or in SQuirreL terminology an *alias* using the new driver. To do so, select the **Aliases** panel on the left and click the `+` sign: - -![squirell 5 add alias](/explore-analyze/images/elasticsearch-reference-squirell-5-add-alias.png "") - -Name the new alias and select the `Elasticsearch` driver previously added: - -![squirell 6 alias props](/explore-analyze/images/elasticsearch-reference-squirell-6-alias-props.png "") - -The setup is completed. Double check it by clicking on **Test Connection**. - - -## Execute SQL queries [_execute_sql_queries_2] - -The connection should open automatically (if it has been created before simply click on **Connect** in the **Alias** panel). 
SQuirreL SQL can now issue SQL commands to {{es}}: - -![squirell 7 data](/explore-analyze/images/elasticsearch-reference-squirell-7-data.png "") - - diff --git a/explore-analyze/query-filter/languages/sql-client-apps-tableau-desktop.md b/explore-analyze/query-filter/languages/sql-client-apps-tableau-desktop.md deleted file mode 100644 index 5214d2262b..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-tableau-desktop.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-tableau-desktop.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Tableau Desktop [sql-client-apps-tableau-desktop] - -Use the {{es}} JDBC driver and dedicated {{es}} Tableau Connector to access {{es}} data from Tableau Desktop. - -::::{important} -Elastic does not endorse, promote or provide support for this application; for native Elasticsearch integration in this product, reach out to its vendor. -:::: - - -## Prerequisites [sql-client-apps-tableau-desktop-prereqs] - -* [Tableau Desktop](https://www.tableau.com/products/desktop) 2019.4 or later -* Elasticsearch SQL [JDBC driver](sql-jdbc.md) -* [{{es}} Connector for Tableau](https://www.elastic.co/downloads/tableau-connector) - - -## Load data [sql-client-apps-tableau-desktop-load-data] - -First, move or download the JDBC driver to the Tableau Desktop drivers directory: - -* Windows: `C:\Program Files\Tableau\Drivers` -* Mac: `/Users/[user]/Library/Tableau/Drivers` - -Move the {{es}} Connector for Tableau to the Tableau Desktop connectors directory: - -* Windows: `C:\Users\[Windows User]\Documents\My Tableau Repository\Connectors` -* Mac: `/Users/[user]/Documents/My Tableau Repository/Connectors` - -Launch Tableau Desktop. In the menu, click **More…** and select **Elasticsearch by Elastic** as the data source. 
- -$$$apps_tableau_desktop_from_connector$$$ -![Select Elasticsearch by Elastic as the data source](/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_from_connector.png "") - -In the **New connection** modal, enter the information for your {{es}} instance, and click **Sign In**. - -$$$apps_tableau_connect$$$ -![Sign in](/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_connect.png "") - -In the main window, select your {{es}} instance as the **Database**. Then select a table to load. - -$$$apps_tableau_prepare$$$ -![Select a table to load](/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_prepare.png "") - -Finally, generate a report. - -$$$apps_tableau_report$$$ -![Generate a report](/explore-analyze/images/elasticsearch-reference-apps_tableau_desktop_report.png "") diff --git a/explore-analyze/query-filter/languages/sql-client-apps-tableau-server.md b/explore-analyze/query-filter/languages/sql-client-apps-tableau-server.md deleted file mode 100644 index e968ed29ca..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-tableau-server.md +++ /dev/null @@ -1,56 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-tableau-server.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Tableau Server [sql-client-apps-tableau-server] - -Use the {{es}} JDBC driver and dedicated {{es}} Tableau Connector to access {{es}} data from Tableau Server. - -::::{important} -Elastic does not endorse, promote or provide support for this application; for native Elasticsearch integration in this product, reach out to its vendor. 
-:::: - - -## Prerequisites [sql-client-apps-tableau-server-prereqs] - -* [Tableau Server](https://www.tableau.com/products/server) 2019.4 or later -* Elasticsearch SQL [JDBC driver](sql-jdbc.md) -* [{{es}} Connector for Tableau](https://www.elastic.co/downloads/tableau-connector) - - -## Load data [sql-client-apps-tableau-server-load-data] - -First, move or download the JDBC driver to the Tableau Server drivers directory: - -* Windows: `C:\Program Files\Tableau\Drivers` -* Mac: `/Users/[user]/Library/Tableau/Drivers` - -Move the {{es}} Connector for Tableau to the Tableau Server connectors directory. To find the location of this directory, refer to Tableau Server documentation or use the TSM command line interface. - -Restart Tableau Server. - -To load data into a workbook, add a **New Data Source** from the **Data** menu or using the icon. In the **Connectors** tab of the **Connect to Data** modal, select **Elasticsearch by Elastic**. - -$$$apps_tableau_server_from_connector$$$ -![Select Elasticsearch as the data source](/explore-analyze/images/elasticsearch-reference-apps_tableau_server_from_connector.png "") - -Enter the information for your {{es}} instance, and click **Sign In**. - -$$$apps_tableau_server_connect$$$ -![Sign in](/explore-analyze/images/elasticsearch-reference-apps_tableau_server_connect.png "") - -In the main window, select your {{es}} instance as the **Database**. Then select a table to load. - -$$$apps_tableau_server_prepare$$$ -![Select a table to load](/explore-analyze/images/elasticsearch-reference-apps_tableau_server_prepare.png "") - -Finally, generate a report. 
- -$$$apps_tableau_server_report$$$ -![Generate a report](/explore-analyze/images/elasticsearch-reference-apps_tableau_server_report.png "") diff --git a/explore-analyze/query-filter/languages/sql-client-apps-workbench.md b/explore-analyze/query-filter/languages/sql-client-apps-workbench.md deleted file mode 100644 index b9821cf88a..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps-workbench.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps-workbench.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# SQL Workbench/J [sql-client-apps-workbench] - -You can use the {{es}} JDBC driver to access {{es}} data from SQL Workbench/J. - -::::{important} -Elastic does not endorse, promote or provide support for this application; for native Elasticsearch integration in this product, reach out to its vendor. -:::: - - -## Prerequisites [_prerequisites_10] - -* [SQL Workbench/J](https://www.sql-workbench.eu/) build 125 or higher -* Elasticsearch SQL [JDBC driver](sql-jdbc.md) - - -## Add {{es}} JDBC driver [_add_es_jdbc_driver_2] - -Add the {{es}} JDBC driver to SQL Workbench/J through **Manage Drivers** either from the main windows in the **File** menu or from the **Connect** window: - -![workbench 1 manage drivers](/explore-analyze/images/elasticsearch-reference-workbench-1-manage-drivers.png "") - -Select **Elasticsearch** profile from the left-hand side (if it is missing check the SQL Workbench/J version or add a new entry to the list through the blank page button in the upper left corner): - -![workbench 2 select driver](/explore-analyze/images/elasticsearch-reference-workbench-2-select-driver.png "") - -Add the JDBC jar (if the driver name hasn’t been picked up already, click on the magnifier button): - -![workbench 3 add jar](/explore-analyze/images/elasticsearch-reference-workbench-3-add-jar.png "") - - -## Create a new 
connection profile [_create_a_new_connection_profile] - -With the driver configured, create a new connection profile through **File** > **Connect Window** (or Alt+C shortcut): - -![workbench 4 connection](/explore-analyze/images/elasticsearch-reference-workbench-4-connection.png "") - -Select the previously configured driver and set the URL of your cluster using the JDBC syntax. Verify the connection through the **Test** button - a confirmation window should appear that everything is properly configured. - -The setup is complete. - - -## Execute SQL queries [_execute_sql_queries_3] - -SQL Workbench/J is ready to talk to {{es}} through SQL: click on the profile created to execute statements or explore the data: - -![workbench 5 data](/explore-analyze/images/elasticsearch-reference-workbench-5-data.png "") - - diff --git a/explore-analyze/query-filter/languages/sql-client-apps.md b/explore-analyze/query-filter/languages/sql-client-apps.md deleted file mode 100644 index 5dc59907ad..0000000000 --- a/explore-analyze/query-filter/languages/sql-client-apps.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-client-apps.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# SQL Client Applications [sql-client-apps] - -Thanks to its [JDBC](sql-jdbc.md) and [ODBC](sql-odbc.md) interfaces, a broad range of third-party applications can use {{es}}'s SQL capabilities. This section lists, in alphabetical order, a number of them and their respective configuration - the list however is by no means comprehensive (feel free to [submit a PR](https://www.elastic.co/blog/art-of-pull-request) to improve it): as long as the app can use the Elasticsearch SQL driver, it can use Elasticsearch SQL. 
- -* [DBeaver](sql-client-apps-dbeaver.md) -* [DbVisualizer](sql-client-apps-dbvis.md) -* [Microsoft Excel](sql-client-apps-excel.md) -* [Microsoft Power BI Desktop](sql-client-apps-powerbi.md) -* [Microsoft PowerShell](sql-client-apps-ps1.md) -* [MicroStrategy Desktop](sql-client-apps-microstrat.md) -* [Qlik Sense Desktop](sql-client-apps-qlik.md) -* [SQuirreL SQL](sql-client-apps-squirrel.md) -* [SQL Workbench](sql-client-apps-workbench.md) -* [Tableau Desktop](sql-client-apps-tableau-desktop.md) -* [Tableau Server](sql-client-apps-tableau-server.md) - -::::{important} -Elastic does not endorse, promote or provide support for any of the applications listed. For native Elasticsearch integration in these products, reach out to their respective vendor. -:::: - - -::::{note} -Each application has its own requirements and license these are outside the scope of this documentation which covers only the configuration aspect with Elasticsearch SQL. -:::: - - -::::{warning} -The support for applications implementing the ODBC 2.x standard and prior is currently limited. -:::: - - - - - - - - - - - - - diff --git a/explore-analyze/query-filter/languages/sql-concepts.md b/explore-analyze/query-filter/languages/sql-concepts.md deleted file mode 100644 index b63a913e46..0000000000 --- a/explore-analyze/query-filter/languages/sql-concepts.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -navigation_title: Conventions -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-concepts.html - - https://www.elastic.co/guide/en/elasticsearch/reference/current/_mapping_concepts_across_sql_and_elasticsearch.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Conventions and Terminology [sql-concepts] - -For clarity, it is important to establish the meaning behind certain words as, the same wording might convey different meanings to different readers depending on one’s familiarity with SQL versus {{es}}. 
- -::::{note} -This documentation while trying to be complete, does assume the reader has *basic* understanding of {{es}} and/or SQL. If that is not the case, continue reading the documentation however take notes and pursue the topics that are unclear either through the main {{es}} documentation or through the plethora of SQL material available in the open (there are simply too many excellent resources here to enumerate). -:::: - - -As a general rule, Elasticsearch SQL as the name indicates provides a SQL interface to {{es}}. As such, it follows the SQL terminology and conventions first, whenever possible. However the backing engine itself is {{es}} for which Elasticsearch SQL was purposely created hence why features or concepts that are not available, or cannot be mapped correctly, in SQL appear in Elasticsearch SQL. Last but not least, Elasticsearch SQL tries to obey the [principle of least surprise](https://en.wikipedia.org/wiki/Principle_of_least_astonishment), though as all things in the world, everything is relative. - -## Mapping concepts across SQL and Elasticsearch [_mapping_concepts_across_sql_and_es] - -While SQL and {{es}} have different terms for the way the data is organized (and different semantics), essentially their purpose is the same. - -So let’s start from the bottom; these roughly are: - -| SQL | {{es}} | Description | -| --- | --- | --- | -| `column` | `field` | In both cases, at the lowest level, data is stored in *named* entries, of a variety of [data types](elasticsearch://reference/query-languages/sql/sql-data-types.md), containing *one* value. SQL calls such an entry a *column* while {{es}} a *field*. Notice that in {{es}} a field can contain *multiple* values of the same type (essentially a list) while in SQL, a *column* can contain *exactly* one value of said type. Elasticsearch SQL will do its best to preserve the SQL semantic and, depending on the query, reject those that return fields with more than one value. 
| -| `row` | `document` | `Column`s and `field`s do *not* exist by themselves; they are part of a `row` or a `document`. The two have slightly different semantics: a `row` tends to be *strict* (and have more enforcements) while a `document` tends to be a bit more flexible or loose (while still having a structure). | -| `table` | `index` | The target against which queries, whether in SQL or {{es}} get executed against. | -| `schema` | *implicit* | In RDBMS, `schema` is mainly a namespace of tables and typically used as a security boundary. {{es}} does not provide an equivalent concept for it. However when security is enabled, {{es}} automatically applies the security enforcement so that a role sees only the data it is allowed to (in SQL jargon, its *schema*). | -| `catalog` or `database` | `cluster` instance | In SQL, `catalog` or `database` are used interchangeably and represent a set of schemas that is, a number of tables.In {{es}} the set of indices available are grouped in a `cluster`. The semantics also differ a bit; a `database` is essentially yet another namespace (which can have some implications on the way data is stored) while an {{es}} `cluster` is a runtime instance, or rather a set of at least one {{es}} instance (typically running distributed).In practice this means that while in SQL one can potentially have multiple catalogs inside an instance, in {{es}} one is restricted to only *one*. | -| `cluster` | `cluster` (federated) | Traditionally in SQL, *cluster* refers to a single RDBMS instance which contains a number of `catalog`s or `database`s (see above). The same word can be reused inside {{es}} as well however its semantic clarified a bit.
While RDBMS tend to have only one running instance, on a single machine (*not* distributed), {{es}} goes the opposite way and by default, is distributed and multi-instance.
Further more, an {{es}} `cluster` can be connected to other `cluster`s in a *federated* fashion thus `cluster` means:
single cluster::Multiple {{es}} instances typically distributed across machines, running within the same namespace.multiple clusters::Multiple clusters, each with its own namespace, connected to each other in a federated setup (see [{{ccs-cap}}](../../../solutions/search/cross-cluster-search.md)). | - -As one can see while the mapping between the concepts are not exactly one to one and the semantics somewhat different, there are more things in common than differences. In fact, thanks to SQL declarative nature, many concepts can move across {{es}} transparently and the terminology of the two likely to be used interchangeably throughout the rest of the material. - - - - diff --git a/explore-analyze/query-filter/languages/sql-getting-started.md b/explore-analyze/query-filter/languages/sql-getting-started.md deleted file mode 100644 index 498ce3e6c4..0000000000 --- a/explore-analyze/query-filter/languages/sql-getting-started.md +++ /dev/null @@ -1,59 +0,0 @@ ---- -navigation_title: Getting started -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-getting-started.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Getting Started with SQL [sql-getting-started] - -To start using Elasticsearch SQL, create an index with some data to experiment with: - -```console -PUT /library/_bulk?refresh -{"index":{"_id": "Leviathan Wakes"}} -{"name": "Leviathan Wakes", "author": "James S.A. 
Corey", "release_date": "2011-06-02", "page_count": 561} -{"index":{"_id": "Hyperion"}} -{"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} -{"index":{"_id": "Dune"}} -{"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} -``` - -And now you can execute SQL using the [SQL search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-query): - -```console -POST /_sql?format=txt -{ - "query": "SELECT * FROM library WHERE release_date < '2000-01-01'" -} -``` - -Which should return something along the lines of: - -```text - author | name | page_count | release_date ----------------+---------------+---------------+------------------------ -Dan Simmons |Hyperion |482 |1989-05-26T00:00:00.000Z -Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -``` - -You can also use the [*SQL CLI*](sql-cli.md). There is a script to start it shipped in the Elasticsearch `bin` directory: - -```bash -$ ./bin/elasticsearch-sql-cli -``` - -From there you can run the same query: - -```sql -sql> SELECT * FROM library WHERE release_date < '2000-01-01'; - author | name | page_count | release_date ----------------+---------------+---------------+------------------------ -Dan Simmons |Hyperion |482 |1989-05-26T00:00:00.000Z -Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -``` - diff --git a/explore-analyze/query-filter/languages/sql-jdbc-api-usage.md b/explore-analyze/query-filter/languages/sql-jdbc-api-usage.md deleted file mode 100644 index ba08673333..0000000000 --- a/explore-analyze/query-filter/languages/sql-jdbc-api-usage.md +++ /dev/null @@ -1,83 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/_api_usage.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# API usage [_api_usage] - -One can use JDBC through the official `java.sql` and `javax.sql` packages: - -## `java.sql` [java-sql] - -The former 
through `java.sql.Driver` and `DriverManager`: - -```java -String address = "jdbc:es://" + elasticsearchAddress; <1> -Properties connectionProperties = connectionProperties(); <2> -Connection connection = - DriverManager.getConnection(address, connectionProperties); -``` - -1. The server and port on which Elasticsearch is listening for HTTP traffic. The port is by default 9200. -2. Properties for connecting to Elasticsearch. An empty `Properties` instance is fine for unsecured Elasticsearch. - - - -## `javax.sql` [javax-sql] - -Accessible through the `javax.sql.DataSource` API: - -```java -EsDataSource dataSource = new EsDataSource(); -String address = "jdbc:es://" + elasticsearchAddress; <1> -dataSource.setUrl(address); -Properties connectionProperties = connectionProperties(); <2> -dataSource.setProperties(connectionProperties); -Connection connection = dataSource.getConnection(); -``` - -1. The server and port on which Elasticsearch is listening for HTTP traffic. By default 9200. -2. Properties for connecting to Elasticsearch. An empty `Properties` instance is fine for unsecured Elasticsearch. - - -Which one to use? Typically client applications that provide most configuration properties in the URL rely on the `DriverManager`-style while `DataSource` is preferred when being *passed* around since it can be configured in one place and the consumer only has to call `getConnection` without having to worry about any other properties. - -To connect to a secured Elasticsearch server the `Properties` should look like: - -```java -Properties properties = new Properties(); -properties.put("user", "test_admin"); -properties.put("password", "x-pack-test-password"); -``` - -Once you have the connection you can use it like any other JDBC connection. 
For example: - -```java -try (Statement statement = connection.createStatement(); - ResultSet results = statement.executeQuery( - " SELECT name, page_count" - + " FROM library" - + " ORDER BY page_count DESC" - + " LIMIT 1")) { - assertTrue(results.next()); - assertEquals("Don Quixote", results.getString(1)); - assertEquals(1072, results.getInt(2)); - SQLException e = expectThrows(SQLException.class, () -> - results.getInt(1)); - assertThat(e.getMessage(), containsString("Unable to convert " - + "value [Don Quixote] of type [TEXT] to [Integer]")); - assertFalse(results.next()); -} -``` - -::::{note} -Elasticsearch SQL doesn’t provide a connection pooling mechanism, thus the connections the JDBC driver creates are not pooled. In order to achieve pooled connections, a third-party connection pooling mechanism is required. Configuring and setting up the third-party provider is outside the scope of this documentation. -:::: - - - diff --git a/explore-analyze/query-filter/languages/sql-jdbc.md b/explore-analyze/query-filter/languages/sql-jdbc.md deleted file mode 100644 index 4103887142..0000000000 --- a/explore-analyze/query-filter/languages/sql-jdbc.md +++ /dev/null @@ -1,208 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-jdbc.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# SQL JDBC [sql-jdbc] - -{{es}}'s SQL jdbc driver is a rich, fully featured JDBC driver for {{es}}. It is Type 4 driver, meaning it is a platform independent, stand-alone, Direct to Database, pure Java driver that converts JDBC calls to Elasticsearch SQL. - - -## Installation [sql-jdbc-installation] - -The JDBC driver can be obtained from: - -Dedicated page -: [elastic.co](https://www.elastic.co/downloads/jdbc-client) provides links, typically for manual downloads. 
- -Maven dependency -: [Maven](https://maven.apache.org/)-compatible tools can retrieve it automatically as a dependency: - -```xml subs=true - - org.elasticsearch.plugin - x-pack-sql-jdbc - {{version.stack}} - -``` - -from [Maven Central Repository](https://search.maven.org/artifact/org.elasticsearch.plugin/x-pack-sql-jdbc), or from `artifacts.elastic.co/maven` by adding it to the repositories list: - -```xml - - - elastic.co - https://artifacts.elastic.co/maven - - -``` - - -## Version compatibility [jdbc-compatibility] - -Your driver must be compatible with your {{es}} version. - -::::{important} -The driver version cannot be newer than the {{es}} version. For example, {{es}} version 7.10.0 is not compatible with {{version.stack}} drivers. -:::: - - -| {{es}} version | Compatible driver versions | Example | -| --- | --- | --- | -| 7.7.0 and earlier versions | * The same version.
| {{es}} 7.6.1 is only compatible with 7.6.1 drivers. | - - -## Setup [jdbc-setup] - -The driver main class is `org.elasticsearch.xpack.sql.jdbc.EsDriver`. Note the driver implements the JDBC 4.0 `Service Provider` mechanism meaning it is registered automatically as long as it is available in the classpath. - -Once registered, the driver understands the following syntax as an URL: - -```text -jdbc:[es|elasticsearch]://[[http|https]://]?[host[:port]]?/[prefix]?[\?[option=value]&]* -``` - -`jdbc:[es|elasticsearch]://` -: Prefix. Mandatory. - -`[[http|https]://]` -: Type of HTTP connection to make. Possible values are `http` (default) or `https`. Optional. - -`[host[:port]]` -: Host (`localhost` by default) and port (`9200` by default). Optional. - -`[prefix]` -: Prefix (empty by default). Typically used when hosting {{es}} under a certain path. Optional. - -`[option=value]` -: Properties for the JDBC driver. Empty by default. Optional. - -The driver recognized the following properties: - - -#### Essential [jdbc-cfg] - -$$$jdbc-cfg-timezone$$$ - -`timezone` (default JVM timezone) -: Timezone used by the driver *per connection* indicated by its `ID`. **Highly** recommended to set it (to, say, `UTC`) as the JVM timezone can vary, is global for the entire JVM and can’t be changed easily when running under a security manager. - - -#### Network [jdbc-cfg-network] - -`connect.timeout` (default `30000`) -: Connection timeout (in milliseconds). That is the maximum amount of time waiting to make a connection to the server. - -`network.timeout` (default `60000`) -: Network timeout (in milliseconds). That is the maximum amount of time waiting for the network. - -`page.size` (default `1000`) -: Page size (in entries). The number of results returned per page by the server. - -`page.timeout` (default `45000`) -: Page timeout (in milliseconds). Minimum retention period for the scroll cursor on the server. 
Queries that require a stateful scroll cursor on the server side might fail after this timeout. Hence, when scrolling through large result sets, processing `page.size` records should not take longer than `page.timeout` milliseconds. - -`query.timeout` (default `90000`) -: Query timeout (in milliseconds). That is the maximum amount of time waiting for a query to return. - - -### Basic Authentication [jdbc-cfg-auth] - -`user` -: Basic Authentication user name - -`password` -: Basic Authentication password - - -### SSL [jdbc-cfg-ssl] - -`ssl` (default `false`) -: Enable SSL - -`ssl.keystore.location` -: key store (if used) location - -`ssl.keystore.pass` -: key store password - -`ssl.keystore.type` (default `JKS`) -: key store type. `PKCS12` is a common, alternative format - -`ssl.truststore.location` -: trust store location - -`ssl.truststore.pass` -: trust store password - -`ssl.truststore.type` (default `JKS`) -: trust store type. `PKCS12` is a common, alternative format - -`ssl.protocol`(default `TLS`) -: SSL protocol to be used - - -### Proxy [_proxy] - -`proxy.http` -: Http proxy host name - -`proxy.socks` -: SOCKS proxy host name - - -### Mapping [_mapping] - -`field.multi.value.leniency` (default `true`) -: Whether to be lenient and return the first value (without any guarantees of what that will be - typically the first in natural ascending order) for fields with multiple values (true) or throw an exception. - - -### Index [_index] - -`index.include.frozen` (default `false`) -: Whether to include frozen indices in the query execution or not (default). - - -### Cluster [_cluster] - -`catalog` -: Default catalog (cluster) for queries. If unspecified, the queries execute on the data in the local cluster only. - - See [{{ccs}}](../../../solutions/search/cross-cluster-search.md). 
- - - -### Error handling [_error_handling] - -`allow.partial.search.results` (default `false`) -: Whether to return partial results in case of shard failure or fail the query throwing the underlying exception (default). - - -### Troubleshooting [_troubleshooting] - -`debug` (default `false`) -: Setting it to `true` will enable the debug logging. - -`debug.output` (default `err`) -: The destination of the debug logs. By default, they are sent to standard error. Value `out` will redirect the logging to standard output. A file path can also be specified. - - -### Additional [_additional] - -`validate.properties` (default `true`) -: If disabled, it will ignore any misspellings or unrecognizable properties. When enabled, an exception will be thrown if the provided property cannot be recognized. - -To put all of it together, the following URL: - -```text -jdbc:es://http://server:3456/?timezone=UTC&page.size=250 -``` - -opens up a Elasticsearch SQL connection to `server` on port `3456`, setting the JDBC connection timezone to `UTC` and its pagesize to `250` entries. - - diff --git a/explore-analyze/query-filter/languages/sql-odbc-installation.md b/explore-analyze/query-filter/languages/sql-odbc-installation.md deleted file mode 100644 index 31d0b2ada4..0000000000 --- a/explore-analyze/query-filter/languages/sql-odbc-installation.md +++ /dev/null @@ -1,203 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-odbc-installation.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Driver installation [sql-odbc-installation] - -The Elasticsearch SQL ODBC Driver can be installed on Microsoft Windows using an MSI package. The installation process is simple and is composed of standard MSI wizard steps. - -## Installation Prerequisites [prerequisites] - -The recommended installation platform is Windows 10 64 bit or Windows Server 2016 64 bit. 
- -Before you install the Elasticsearch SQL ODBC Driver you need to meet the following prerequisites; - -* .NET Framework 4.x full, latest - [https://dotnet.microsoft.com/download/dotnet-framework](https://dotnet.microsoft.com/download/dotnet-framework) -* Microsoft Visual C++ Redistributable for Visual Studio 2017 or later - [https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist](https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist) - - * The 64 bit driver requires the x64 redistributable - * The 32 bit driver requires the x86 or the x64 redistributable (the latter also installs the components needed for the 32 bit driver) - -* Elevated privileges (administrator) for the User performing the installation. - -If you fail to meet any of the prerequisites the installer will show an error message and abort the installation. - -::::{note} -It is not possible to inline upgrade using the MSI. In order to upgrade, you will first have to uninstall the old driver and then install the new driver. -:::: - - -::::{note} -When installing the MSI, the Windows Defender SmartScreen might warn about running an unrecognized app. If the MSI has been downloaded from Elastic’s web site, it is safe to acknowledge the message by allowing the installation to continue (`Run anyway`). -:::: - - - -## Version compatibility [odbc-compatibility] - -Your driver must be compatible with your {{es}} version. - -::::{important} -The driver version cannot be newer than the {{es}} version. For example, {{es}} version 7.10.0 is not compatible with {{version.stack}} drivers. -:::: - - -| {{es}} version | Compatible driver versions | Example | -| --- | --- | --- | -| 7.7.0 and earlier versions | * The same version.
| {{es}} 7.6.1 is only compatible with 7.6.1 drivers. | - - -## Download the `.msi` package(s) [download] - -Download the `.msi` package for Elasticsearch SQL ODBC Driver {{version.stack}} from: [https://www.elastic.co/downloads/odbc-client](https://www.elastic.co/downloads/odbc-client) - -There are two versions of the installer available: - -* **32 bit driver (x86)** for use with the Microsoft Office 2016 suite of applications; notably Microsoft Excel and Microsoft Access and other 32 bit based programs. -* **64 bit driver (x64)** recommended for use with all other applications. - -Users should consider downloading and installing both the 32 and 64 bit drivers for maximum compatibility across applications installed on their system. - - -## Installation using the graphical user interface (GUI) [installation-gui] - -Double-click the downloaded `.msi` package to launch a GUI wizard that will guide you through the installation process. - -You will first be presented with a welcome screen: - -:::{image} /explore-analyze/images/elasticsearch-reference-installer_started.png -:alt: Installer Welcome Screen -::: - -Clicking **Next** will present the End User License Agreement. You will need to accept the license agreement in order to continue the installation. - -:::{image} /explore-analyze/images/elasticsearch-reference-installer_accept_license.png -:alt: Installer EULA Screen -::: - -The following screen allows you to customise the installation path for the Elasticsearch ODBC driver files. - -::::{note} -The default installation path is of the format: **%ProgramFiles%\Elastic\ODBCDriver\\{{version.stack}}** -:::: - - -:::{image} /explore-analyze/images/elasticsearch-reference-installer_choose_destination.png -:alt: Installer Driver Path -::: - -You are now ready to install the driver. - -::::{note} -You will require elevated privileges (administrator) for installation. 
-:::: - - -:::{image} /explore-analyze/images/elasticsearch-reference-installer_ready_install.png -:alt: Installer Begin -::: - -Assuming the installation takes place without error you should see progress screen, followed by the finish screen: - -:::{image} /explore-analyze/images/elasticsearch-reference-installer_installing.png -:alt: Installer Installing -::: - -On the finish screen you can launch the ODBC Data Source Administration screen by checking the dialog checkbox. This will automatically launch the configuration screen on close (either 32 bit or 64 bit) where you can configure a DSN. - -:::{image} /explore-analyze/images/elasticsearch-reference-installer_finish.png -:alt: Installer Complete -::: - -As with any MSI installation package, a log file for the installation process can be found within the `%TEMP%` directory, with a randomly generated name adhering to the format `MSI.LOG`. - -If you encounter an error during installation we would encourage you to open an issue [https://github.com/elastic/elasticsearch-sql-odbc/issues](https://github.com/elastic/elasticsearch-sql-odbc/issues), attach your installation log file and provide additional details so we can investigate. - - -## Installation using the command line [installation-cmd] - -::::{note} -The examples given below apply to installation of the 64 bit MSI package. To achieve the same result with the 32 bit MSI package you would instead use the filename suffix `windows-x86.msi` -:::: - - -The `.msi` can also be installed via the command line. The simplest installation using the same defaults as the GUI is achieved by first navigating to the download directory, then running: - -```sh subs=true -msiexec.exe /i esodbc-{{version.stack}}-windows-x86_64.msi /qn -``` - -By default, `msiexec.exe` does not wait for the installation process to complete, since it runs in the Windows subsystem. 
To wait on the process to finish and ensure that `%ERRORLEVEL%` is set accordingly, it is recommended to use `start /wait` to create a process and wait for it to exit: - -```sh subs=true -start /wait msiexec.exe /i esodbc-{{version.stack}}-windows-x86_64.msi /qn -``` - -As with any MSI installation package, a log file for the installation process can be found within the `%TEMP%` directory, with a randomly generated name adhering to the format `MSI.LOG`. The path to a log file can be supplied using the `/l` command line argument - -```sh subs=true -start /wait msiexec.exe /i esodbc-{{version.stack}}-windows-x86_64.msi /qn /l install.log -``` - -Supported Windows Installer command line arguments can be viewed using: - -```sh -msiexec.exe /help -``` - -…or by consulting the [Windows Installer SDK Command-Line Options](https://msdn.microsoft.com/en-us/library/windows/desktop/aa367988(v=vs.85).aspx). - -### Command line options [odbc-msi-command-line-options] - -All settings exposed within the GUI are also available as command line arguments (referred to as *properties* within Windows Installer documentation) that can be passed to `msiexec.exe`: - -`INSTALLDIR` -: The installation directory. Defaults to _%ProgramFiles%\Elastic\ODBCDriver\\{{version.stack}}_. - -To pass a value, simply append the property name and value using the format `=""` to the installation command. For example, to use a different installation directory to the default one: - -```sh subs=true -start /wait msiexec.exe /i esodbc-{{version.stack}}-windows-x86_64.msi /qn INSTALLDIR="c:\CustomDirectory" -``` - -Consult the [Windows Installer SDK Command-Line Options](https://msdn.microsoft.com/en-us/library/windows/desktop/aa367988(v=vs.85).aspx) for additional rules related to values containing quotation marks. - - -### Uninstall using Add/Remove Programs [odbc-uninstall-msi-gui] - -The `.msi` package handles uninstallation of all directories and files added as part of installation. 
- -::::{warning} -Uninstallation will remove **all** contents created as part of installation. -:::: - - -An installed program can be uninstalled by pressing the Windows key and typing `add or remove programs` to open the system settings. - -Once opened, find the Elasticsearch ODBC Driver installation within the list of installed applications, click and choose `Uninstall`: - -:::{image} /explore-analyze/images/elasticsearch-reference-uninstall.png -:alt: uninstall -:name: odbc-msi-installer-uninstall -::: - - -### Uninstall using the command line [odbc-uninstall-msi-command-line] - -Uninstallation can also be performed from the command line by navigating to the directory containing the `.msi` package and running: - -```sh subs=true -start /wait msiexec.exe /x esodbc-{{version.stack}}-windows-x86_64.msi /qn -``` - -Similar to the install process, a path for a log file for the uninstallation process can be passed using the `/l` command line argument - -```sh subs=true -start /wait msiexec.exe /x esodbc-{{version.stack}}-windows-x86_64.msi /qn /l uninstall.log -``` diff --git a/explore-analyze/query-filter/languages/sql-odbc-setup.md b/explore-analyze/query-filter/languages/sql-odbc-setup.md deleted file mode 100644 index e5e12652bc..0000000000 --- a/explore-analyze/query-filter/languages/sql-odbc-setup.md +++ /dev/null @@ -1,397 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-odbc-setup.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Configuration [sql-odbc-setup] - -Once the driver has been installed, in order for an application to be able to connect to {{es}} through ODBC, a set of configuration parameters must be provided to the driver. Depending on the application, there are generally three ways of providing these parameters: - -* through a connection string; -* using a User DSN or System DSN; -* through a File DSN. 
- -DSN (*data source name*) is a generic name given to the set of parameters an ODBC driver needs to connect to a database. - -We will refer to these parameters as *connection parameters* or *DSN* (despite some of these parameters configuring some other aspects of a driver’s functions; e.g. logging, buffer sizes…). - -Using a DSN is the most widely used, simplest and safest way of performing the driver configuration. Constructing a connection string, on the other hand, is the most crude way and consequently the least common method. - -We will focus on DSN usage only. - -## 1. Launching ODBC Data Source Administrator [data-source-administrator] - -For DSN management, ODBC provides the *ODBC Data Source Administrator* application, readily installed on all recent desktop Windows operating systems. - -* The 32-bit version of the Odbcad32.exe file is located in the `%systemdrive%\Windows\SysWoW64` folder. -* The 64-bit version of the Odbcad32.exe file is located in the `%systemdrive%\Windows\System32` folder. - -To launch it, open the search menu - *Win + S* - and type "ODBC Data Sources (64-bit)" or "ODBC Data Sources (32-bit)" and press *Enter*: - -$$$launch_administrator$$$ -![launch administrator](/explore-analyze/images/elasticsearch-reference-launch_administrator.png "") - -Once launched, you can verify that the driver was installed correctly by clicking on the *Drivers* tab of the ODBC Data Source Administrator and checking that *Elasticsearch Driver* is present in the list of installed drivers. - -You should also see the version number of the installed driver. - -$$$administrator_drivers$$$ -![administrator drivers](/explore-analyze/images/elasticsearch-reference-administrator_drivers.png "") - - -## 2. Configure a DSN [dsn-configuration] - -The next step is to configure a DSN. 
You can choose between the following options mapped on the first three tabs of the Administrator application: - -* User DSN - - The connections configured under this tab are only available to the currently logged in user. Each of these DSNs are referred to by a chosen arbitrary name (typically a host or cluster name). - - The actual set of parameters making up the DSN is stored through the driver in the system registry. Thus, a user will later only need to provide an application with the DSN name in order to connect to the configured {{es}} instance. - -* System DSN - - Similar to a User DSN, except that the connections configured under this tab will be available to all the users configured on the system. - -* File DSN - - This tab contains functionality that will allow to have one set of connection parameters written into a file, rather then the Registry. - - Such a file can be then shared among multiple systems and the user will need to specify the path to it, in order to have the application connect to the configured {{es}} instance. - - -The configuration steps are similar for all the above points. Following is an example of configuring a System DSN. - - -#### 2.1 Launch Elasticsearch SQL ODBC Driver DSN Editor [_2_1_launch_elasticsearch_sql_odbc_driver_dsn_editor] - -Click on the *System DSN* tab, then on the *Add…* button: - -$$$system_add$$$ -![administrator system add](/explore-analyze/images/elasticsearch-reference-administrator_system_add.png "") - -A new window will open, listing all available installed drivers. 
Click on *{{es}} Driver*, to highlight it, then on the *Finish* button: - -$$$launch_editor$$$ -![administrator launch editor](/explore-analyze/images/elasticsearch-reference-administrator_launch_editor.png "") - -This action closes the previously opened second window and open a new one instead, Elasticsearch SQL ODBC Driver’s DSN Editor: - -$$$dsn_editor$$$ -![dsn editor basic](/explore-analyze/images/elasticsearch-reference-dsn_editor_basic.png "") - -This new window has three tabs, each responsible for a set of configuration parameters, as follows. - - -#### 2.2 Connection parameters [connection_parameters] - -This tab allows configuration for the following items: - -* Name - This is the name the DSN will be referred by. - - ::::{note} - The characters available for this field are limited to the set permitted for a Registry key. - :::: - - - Example: *localhost* - -* Description - - This field allows a arbitrary text; generally used for short notes about the configured connection. - - Example: *Clear-text connection to the local [::1]:9200.* - -* Cloud ID - - The *Cloud ID* is a string that simplifies the configuration when connecting to {{ecloud}}. It encodes the connection parameters to that cluster. You can obtain it from from your deployment's details in the {{ecloud}} Console. - - ::::{note} - When this field is provisioned, the *Hostname*, *Port* and the security settings are provisioned as well and their respective inputs disabled. - :::: - -* Hostname - - This field requires an IP address or a resolvable DNS name of the {{es}} instance that the driver will connect to. - - Example: *::1* - -* Port - - The port on which the {{es}} listens on. - - ::::{note} - If left empty, the default **9200** port number will be used. - :::: - -* Username, Password - - If security is enabled, these fields will need to contain the credentials of the access user. - - -At a minimum, the *Name* and *Hostname* fields must be provisioned, before the DSN can be saved. 
- -::::{warning} -Connection encryption is enabled by default. This will need to be changed if connecting to an {{es}} node with no encryption. -:::: - - - -#### 2.3 Cryptography parameters [_2_3_cryptography_parameters] - -One of the following SSL options can be chosen: - -* Disabled. All communications unencrypted. - - The communication between the driver and the {{es}} instance is performed over a clear-text connection. - - ::::{warning} - This setting can expose the access credentials to a 3rd party intercepting the network traffic and is not recommended. - :::: - -* Enabled. Certificate not validated. - - The connection encryption is enabled, but the certificate of the server is not validated. - - This is currently the default setting. - - ::::{note} - This setting allows a 3rd party to act with ease as a man-in-the-middle and thus intercept all communications. - :::: - -* Enabled. Certificate is validated; hostname not validated. - - The connection encryption is enabled and the driver verifies that server’s certificate is valid, but it does **not** verify if the certificate is running on the server it was meant for. - - ::::{note} - This setting allows a 3rd party that had access to server’s certificate to act as a man-in-the-middle and thus intercept all the communications. - :::: - -* Enabled. Certificate is validated; hostname validated. - - The connection encryption is enabled and the driver verifies that both the certificate is valid, as well as that it is being deployed on the server that the certificate was meant for. - -* Enabled. Certificate identity chain validated. - - This setting is equivalent to the previous one, with one additional check against certificate’s revocation. This offers the strongest security option and is the recommended setting for production deployments. 
- -* Certificate File - - In case the server uses a certificate that is not part of the PKI, for example using a self-signed certificate, you can configure the path to a X.509 certificate file that will be used by the driver to validate server’s offered certificate. - - The driver will only read the contents of the file just before a connection is attempted. See [2.7 Testing the connection](#connection_testing) section further on how to check the validity of the provided parameters. - - ::::{note} - The certificate file can not be bundled or password protected since the driver will not prompt for a password. - :::: - - - If using the file browser to locate the certificate - by pressing the *Browse…* button - only files with *.pem* and *.der* extensions will be considered by default. Choose *All Files (*.*)* from the drop down, if your file ends with a different extension: - - $$$dsn_editor_cert$$$ - ![dsn editor security cert](/explore-analyze/images/elasticsearch-reference-dsn_editor_security_cert.png "") - - - -#### 2.4 Proxy parameters [_2_4_proxy_parameters] - -If connecting to the {{es}} node needs to go through a proxy, the following parameters need to be configured: - -* Type - - What kind of protocol to use when connecting to the proxy host. This also mandates how the {{es}} node you want to connect to over the proxy needs to be specified under [2.2 Connection parameters](#connection_parameters): - - * HTTP, SOCKS4A, SOCKS5H: either IP address or host name is accepted; the proxy will resolve the DNS name; - * SOCKS4, SOCKS5: {{es}} node location needs to be provided as an IP address; - -* Port - - The TCP port the proxy is listening for connections on. - -* Username - - The user part of the credentials used to authenticate to the proxy. - -* Password - - The password part of the credentials for the proxy. 
- - -$$$dsn_editor_proxy$$$ -![dsn editor proxy](/explore-analyze/images/elasticsearch-reference-dsn_editor_proxy.png "") - - -#### 2.5 Connection parameters [_2_5_connection_parameters] - -The connection configuration can further be tweaked by the following parameters. - -* Request timeout (s) - - The maximum time (in seconds) a request to the server can take. This can be overridden by a larger statement-level timeout setting. The value 0 means no timeout. - -* Max page size (rows) - - The maximum number of rows that Elasticsearch SQL server should send the driver for one page. This corresponds to the SQL search API’s [`fetch_size`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-query) parameter. A `0` value indicates a server default. - -* Max page length (MB) - - The maximum size (in megabytes) that an answer can grow to, before being rejected as too large by the driver. This is concerning the HTTP answer body of one page, not the cumulated data volume that a query might generate. - -* Varchar limit - - The maximum width of the string columns. If this setting is greater than zero, the driver will advertise all the string type columns as having a maximum character length equal to this value and will truncate any longer string to it. The string types are textual fields (TEXT, KEYWORD etc.) and some specialized fields (IP, the GEOs etc.). Note that no interpretation of the value is performed before truncation, which can lead to invalid values if the limit is set too low. This is required for those applications that do not support column lengths as large as {{es}} fields can be. - -* Floats format - - Controls how the floating point numbers will be printed, when these are converted to string by the driver. 
Possible values given to this parameter: - - * `scientific`: the exponential notation (ex.: 1.23E01); - * `default`: the default notation (ex.: 12.3); - * `auto`: the driver will choose one of the above depending on the value to be printed. Note that the number of decimals is dependent on the precision (or ODBC scale) of the value being printed and varies with the different floating point types supported by Elasticsearch SQL. This setting is not effective when the application fetches from the driver the values as numbers and then does the conversion subsequently itself. - -* Data encoding - - This value controls which data format to encode the REST content in. Possible values are: - - * `CBOR`: use the Concise Binary Object Representation format. This is the preferred encoding, given its more compact format. - * `JSON`: use the JavaScript Object Notation format. This format is more verbose, but easier to read, especially useful if troubleshooting. - -* Data compression - - This setting controls if and when the REST content - encoded in one of the above formats - is going to be compressed. The possible values are: - - * `on`: enables the compression; - * `off`: disables the compression; - * `auto`: enables the compression, except for the case when the data flows through a secure connection; since in this case the encryption layer employs its own data compression and there can be security implications when an additional compression is enabled, the setting should be kept to this value. - -* Follow HTTP redirects - - Should the driver follow HTTP redirects of the requests to the server? - -* Use local timezone - - This setting controls the timezone of: - - * the context in which the query will execute (especially relevant for functions dealing with timestamp components); - * the timestamps received from / sent to the server. - - If disabled, the UTC timezone will apply; otherwise, the local machine’s set timezone. 
- -* Auto-escape PVAs - - The pattern-value arguments make use of `_` and `%` as special characters to build pattern matching values. Some applications however use these chars as regular ones, which can lead to Elasticsearch SQL returning more data than the app intended. With the auto escaping, the driver will inspect the arguments and will escape these special characters if not already done by the application. - -* Multi value field lenient - - This setting controls the behavior of the server in case a multi-value field is queried. In case this is set and the server encounters such a field, it will pick a value in the set - without any guarantees of what that will be, but typically the first in natural ascending order - and return it as the value for the column. If not set, the server will return an error. This corresponds to the SQL search API’s [`field_multi_value_leniency`](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-query) parameter. - -* Include frozen indices - - If this parameter is `true`, the server will include the frozen indices in the query execution. This corresponds to Elasticsearch SQL’s request parameter `index_include_frozen` - -* Early query execution - - If this configuration is set, the driver will execute a statement as soon as the application submits it for preparation - i.e. early - and is functionally equivalent to a direct execution. This will only happen if the query lacks parameters. Early execution is useful with those applications that inspect the result before actually executing the query. Elasticsearch SQL lacks a preparation API, so early execution is required for interoperability with these applications. 
- - -$$$dsn_editor_misc$$$ -![dsn editor misc](/explore-analyze/images/elasticsearch-reference-dsn_editor_misc.png "") - - -#### 2.6 Logging parameters [_2_6_logging_parameters] - -For troubleshooting purposes, the Elasticsearch SQL ODBC Driver offers functionality to log the API calls that an application makes; this is enabled in the Administrator application: - -$$$administrator_tracing$$$ -![administrator tracing](/explore-analyze/images/elasticsearch-reference-administrator_tracing.png "") - -However, this only logs the ODBC API calls made by the application into the *Driver Manager* and not those made by the *Driver Manager* into the driver itself. To enable logging of the calls that the driver receives, as well as internal driver processing events, you can enable driver’s logging on Editor’s *Logging* tab: - -* Enable Logging? - - Ticking this will enable driver’s logging. A logging directory is also mandatory when this option is enabled (see the next option). However the specified logging directory will be saved in the DSN if provided, even if logging is disabled. - -* Log Directory - - Here is to specify which directory to write the log files in. - - ::::{note} - The driver will create **one log file per connection**, for those connections that generate logging messages. - :::: - -* Log Level - - Configure the verbosity of the logs. - - $$$administrator_logging$$$ - ![dsn editor logging](/explore-analyze/images/elasticsearch-reference-dsn_editor_logging.png "") - - When authentication is enabled, the password will be redacted from the logs. - - -::::{note} -Debug-logging can quickly lead to the creation of many very large files and generate significant processing overhead. Only enable if instructed so and preferably only when fetching low volumes of data. 
-:::: - - - -#### 2.7 Testing the connection [connection_testing] - -Once the *Hostname*, the *Port* (if different from implicit default) and the SSL options are configured, you can test if the provided parameters are correct by pressing the *Test Connection* button. This will instruct the driver to connect to the {{es}} instance and perform a simple SQL test query. (This will thus require a running {{es}} instance with the SQL plugin enabled.) - -$$$dsn_editor_conntest$$$ -![dsn editor conntest](/explore-analyze/images/elasticsearch-reference-dsn_editor_conntest.png "") - -::::{note} -When connection testing, all the configured parameters are taken into account, including the logging configuration. This will allow early detection of potential file/directory access rights conflicts. -:::: - - -See [Alternative logging configuration](#alternative_logging) section further for an alternative way of configuring the logging. - - -## 3. DSN is available [available-dsn] - -Once everything is in place, pressing the *Save* button will store the configuration into the chosen destination (Registry or file). - -Before saving a DSN configuration the provided file/directory paths are verified to be valid on the current system. The DSN editor will however not verify in any way the validity or reachability of the configured *Hostname* : *Port*. See [2.7 Testing the connection](#connection_testing) for an exhaustive check. - -If everything is correct, the name of the newly created DSN will be listed as available to use: - -$$$system_added$$$ -![administrator system added](/explore-analyze/images/elasticsearch-reference-administrator_system_added.png "") - - -## Alternative logging configuration [alternative_logging] - -Due to the specification of the ODBC API, the driver will receive the configured DSN parameters - including the logging ones - only once a connection API is invoked (such as *SQLConnect* or *SQLDriverConnect*). 
The *Driver Manager* will however always make a set of API calls into the driver before attempting to establish a connection. To capture those calls as well, one needs to pass logging configuration parameters in an alternative way. The Elasticsearch SQL ODBC Driver will use an environment variable for this purpose. - -Configuring an environment variable is OS specific and not detailed in this guide. Whether the variable should be configured system-wide or user-specific depends on the way the ODBC-enabled application is being run and if logging should affect the current user only or not. - -The definition of the environment variable needs to be done as follows: - -* Name: *ESODBC_LOG_DIR* -* Value: `[path](?[level])`, where: - - [path] is the path to the directory where the log files will be written into; - - [level] is optional and can take one of the following values: *debug*, *info*, *warn*, *error*; if not provided, *debug* is assumed. - - -$$$env_var_logging$$$ -![env var log](/explore-analyze/images/elasticsearch-reference-env_var_log.png "") - -::::{note} -When enabling the logging through the environment variable, the driver will create **one log file per process**. -:::: - - -Both ways of configuring the logging can coexist and both can use the same destination logging directory. However, one logging message will only be logged once, the connection logging taking precedence over the environment variable logging. 
- - diff --git a/explore-analyze/query-filter/languages/sql-odbc.md b/explore-analyze/query-filter/languages/sql-odbc.md deleted file mode 100644 index 10cb779850..0000000000 --- a/explore-analyze/query-filter/languages/sql-odbc.md +++ /dev/null @@ -1,24 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-odbc.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# SQL ODBC [sql-odbc] - - -## Overview [sql-odbc-overview] - -Elasticsearch SQL ODBC Driver is a 3.80 compliant ODBC driver for {{es}}. It is a core level driver, exposing all of the functionality accessible through the {{es}}'s SQL API, converting ODBC calls into Elasticsearch SQL. - -In order to make use of the driver, the server must have Elasticsearch SQL installed and running with the valid license. - -* [Driver installation](sql-odbc-installation.md) -* [Configuration](sql-odbc-setup.md) - - - diff --git a/explore-analyze/query-filter/languages/sql-overview.md b/explore-analyze/query-filter/languages/sql-overview.md deleted file mode 100644 index 63c68ecb51..0000000000 --- a/explore-analyze/query-filter/languages/sql-overview.md +++ /dev/null @@ -1,31 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-overview.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Overview [sql-overview] - -Elasticsearch SQL aims to provide a powerful yet lightweight SQL interface to {{es}}. - - -## Introduction [sql-introduction] - -Elasticsearch SQL is a feature that allows SQL-like queries to be executed in real-time against {{es}}. Whether using the REST interface, command-line or JDBC, any client can use SQL to search and aggregate data *natively* inside {{es}}. 
One can think of Elasticsearch SQL as a *translator*, one that understands both SQL and {{es}} and makes it easy to read and process data in real-time, at scale by leveraging {{es}} capabilities. - - -## Why Elasticsearch SQL ? [sql-why] - -Native integration -: Elasticsearch SQL is built from the ground up for {{es}}. Each and every query is efficiently executed against the relevant nodes according to the underlying storage. - -No external parts -: No need for additional hardware, processes, runtimes or libraries to query {{es}}; Elasticsearch SQL eliminates extra moving parts by running *inside* the {{es}} cluster. - -Lightweight and efficient -: Elasticsearch SQL does not abstract {{es}} and its search capabilities - on the contrary, it embraces and exposes SQL to allow proper full-text search, in real-time, in the same declarative, succinct fashion. - diff --git a/explore-analyze/query-filter/languages/sql-pagination.md b/explore-analyze/query-filter/languages/sql-pagination.md deleted file mode 100644 index 8de8136e69..0000000000 --- a/explore-analyze/query-filter/languages/sql-pagination.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-pagination.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Paginating through a large response [sql-pagination] - -Using the example from the [previous section](sql-rest-format.md), one can continue to the next page by sending back the cursor field. In the case of CSV, TSV and TXT formats, the cursor is returned in the `Cursor` HTTP header. - -```console -POST /_sql?format=json -{ - "cursor": "sDXF1ZXJ5QW5kRmV0Y2gBAAAAAAAAAAEWYUpOYklQMHhRUEtld3RsNnFtYU1hQQ==:BAFmBGRhdGUBZgVsaWtlcwFzB21lc3NhZ2UBZgR1c2Vy9f///w8=" -} -``` - -Which looks like: - -```console-result -{ - "rows" : [ - ["Dan Simmons", "Hyperion", 482, "1989-05-26T00:00:00.000Z"], - ["Iain M. 
Banks", "Consider Phlebas", 471, "1987-04-23T00:00:00.000Z"], - ["Neal Stephenson", "Snow Crash", 470, "1992-06-01T00:00:00.000Z"], - ["Frank Herbert", "God Emperor of Dune", 454, "1981-05-28T00:00:00.000Z"], - ["Frank Herbert", "Children of Dune", 408, "1976-04-21T00:00:00.000Z"] - ], - "cursor" : "sDXF1ZXJ5QW5kRmV0Y2gBAAAAAAAAAAEWODRMaXBUaVlRN21iTlRyWHZWYUdrdw==:BAFmBmF1dGhvcgFmBG5hbWUBZgpwYWdlX2NvdW50AWYMcmVsZWFzZV9kYXRl9f///w8=" -} -``` - -Note that the `columns` object is only part of the first page. - -You’ve reached the last page when there is no `cursor` returned in the results. Like Elasticsearch’s [scroll](elasticsearch://reference/elasticsearch/rest-apis/paginate-search-results.md#scroll-search-results), SQL may keep state in Elasticsearch to support the cursor. Unlike scroll, receiving the last page is enough to guarantee that the Elasticsearch state is cleared. - -To clear the state earlier, use the [clear cursor API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-clear-cursor): - -```console -POST /_sql/close -{ - "cursor": "sDXF1ZXJ5QW5kRmV0Y2gBAAAAAAAAAAEWYUpOYklQMHhRUEtld3RsNnFtYU1hQQ==:BAFmBGRhdGUBZgVsaWtlcwFzB21lc3NhZ2UBZgR1c2Vy9f///w8=" -} -``` - -Which will like return the - -```console-result -{ - "succeeded" : true -} -``` - diff --git a/explore-analyze/query-filter/languages/sql-rest-columnar.md b/explore-analyze/query-filter/languages/sql-rest-columnar.md deleted file mode 100644 index 1ed0242316..0000000000 --- a/explore-analyze/query-filter/languages/sql-rest-columnar.md +++ /dev/null @@ -1,69 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-rest-columnar.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Columnar results [sql-rest-columnar] - -The most well known way of displaying the results of an SQL query result in general is the one where each individual record/document represents one line/row. 
For certain formats, Elasticsearch SQL can return the results in a columnar fashion: one row represents all the values of a certain column from the current page of results. - -The following formats can be returned in columnar orientation: `json`, `yaml`, `cbor` and `smile`. - -```console -POST /_sql?format=json -{ - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 5, - "columnar": true -} -``` - -Which returns: - -```console-result -{ - "columns": [ - {"name": "author", "type": "text"}, - {"name": "name", "type": "text"}, - {"name": "page_count", "type": "short"}, - {"name": "release_date", "type": "datetime"} - ], - "values": [ - ["Peter F. Hamilton", "Vernor Vinge", "Frank Herbert", "Alastair Reynolds", "James S.A. Corey"], - ["Pandora's Star", "A Fire Upon the Deep", "Dune", "Revelation Space", "Leviathan Wakes"], - [768, 613, 604, 585, 561], - ["2004-03-02T00:00:00.000Z", "1992-06-01T00:00:00.000Z", "1965-06-01T00:00:00.000Z", "2000-03-15T00:00:00.000Z", "2011-06-02T00:00:00.000Z"] - ], - "cursor": "sDXF1ZXJ5QW5kRmV0Y2gBAAAAAAAAAAEWWWdrRlVfSS1TbDYtcW9lc1FJNmlYdw==:BAFmBmF1dGhvcgFmBG5hbWUBZgpwYWdlX2NvdW50AWYMcmVsZWFzZV9kYXRl+v///w8=" -} -``` - -Any subsequent calls using a `cursor` still have to contain the `columnar` parameter to preserve the orientation, meaning the initial query will not *remember* the columnar option. - -```console -POST /_sql?format=json -{ - "cursor": "sDXF1ZXJ5QW5kRmV0Y2gBAAAAAAAAAAEWWWdrRlVfSS1TbDYtcW9lc1FJNmlYdw==:BAFmBmF1dGhvcgFmBG5hbWUBZgpwYWdlX2NvdW50AWYMcmVsZWFzZV9kYXRl+v///w8=", - "columnar": true -} -``` - -Which looks like: - -```console-result -{ - "values": [ - ["Dan Simmons", "Iain M. 
Banks", "Neal Stephenson", "Frank Herbert", "Frank Herbert"], - ["Hyperion", "Consider Phlebas", "Snow Crash", "God Emperor of Dune", "Children of Dune"], - [482, 471, 470, 454, 408], - ["1989-05-26T00:00:00.000Z", "1987-04-23T00:00:00.000Z", "1992-06-01T00:00:00.000Z", "1981-05-28T00:00:00.000Z", "1976-04-21T00:00:00.000Z"] - ], - "cursor": "46ToAwFzQERYRjFaWEo1UVc1a1JtVjBZMmdCQUFBQUFBQUFBQUVXWjBaNlFXbzNOV0pVY21Wa1NUZDJhV2t3V2xwblp3PT3/////DwQBZgZhdXRob3IBBHRleHQAAAFmBG5hbWUBBHRleHQAAAFmCnBhZ2VfY291bnQBBGxvbmcBAAFmDHJlbGVhc2VfZGF0ZQEIZGF0ZXRpbWUBAAEP" -} -``` - diff --git a/explore-analyze/query-filter/languages/sql-rest-filtering.md b/explore-analyze/query-filter/languages/sql-rest-filtering.md deleted file mode 100644 index 4bda896ca5..0000000000 --- a/explore-analyze/query-filter/languages/sql-rest-filtering.md +++ /dev/null @@ -1,56 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-rest-filtering.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Filtering using Elasticsearch Query DSL [sql-rest-filtering] - -One can filter the results that SQL will run on using a standard {{es}} Query DSL by specifying the query in the filter parameter. - -```console -POST /_sql?format=txt -{ - "query": "SELECT * FROM library ORDER BY page_count DESC", - "filter": { - "range": { - "page_count": { - "gte" : 100, - "lte" : 200 - } - } - }, - "fetch_size": 5 -} -``` - -Which returns: - -```text - author | name | page_count | release_date ----------------+------------------------------------+---------------+------------------------ -Douglas Adams |The Hitchhiker's Guide to the Galaxy|180 |1979-10-12T00:00:00.000Z -``` - -::::{tip} -A useful and less obvious usage for standard Query DSL filtering is to search documents by a specific [routing key](elasticsearch://reference/elasticsearch/rest-apis/search-shard-routing.md#search-routing). 
Because Elasticsearch SQL does not support a `routing` parameter, one can specify a [`terms` filter for the `_routing` field](elasticsearch://reference/elasticsearch/mapping-reference/mapping-routing-field.md) instead: - -```console -POST /_sql?format=txt -{ - "query": "SELECT * FROM library", - "filter": { - "terms": { - "_routing": ["abc"] - } - } -} -``` - -:::: - - diff --git a/explore-analyze/query-filter/languages/sql-rest-format.md b/explore-analyze/query-filter/languages/sql-rest-format.md deleted file mode 100644 index 49b4346de2..0000000000 --- a/explore-analyze/query-filter/languages/sql-rest-format.md +++ /dev/null @@ -1,204 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-rest-format.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Response Data Formats [sql-rest-format] - -While the textual format is nice for humans, computers prefer something more structured. - -Elasticsearch SQL can return the data in the following formats which can be set either through the `format` property in the URL or by setting the `Accept` HTTP header: - -::::{note} -The URL parameter takes precedence over the `Accept` HTTP header. If neither is specified then the response is returned in the same format as the request. 
-:::: - - -| format | `Accept` HTTP header | Description | -| --- | --- | --- | -| Human Readable | -| `csv` | `text/csv` | [Comma-separated values](https://en.wikipedia.org/wiki/Comma-separated_values) | -| `json` | `application/json` | [JSON](https://www.json.org/) (JavaScript Object Notation) human-readable format | -| `tsv` | `text/tab-separated-values` | [Tab-separated values](https://en.wikipedia.org/wiki/Tab-separated_values) | -| `txt` | `text/plain` | CLI-like representation | -| `yaml` | `application/yaml` | [YAML](https://en.wikipedia.org/wiki/YAML) (YAML Ain’t Markup Language) human-readable format | -| Binary Formats | -| `cbor` | `application/cbor` | [Concise Binary Object Representation](https://cbor.io/) | -| `smile` | `application/smile` | [Smile](https://en.wikipedia.org/wiki/Smile_(data_interchange_format)) binary data format similar to CBOR | - -The `CSV` format accepts a formatting URL query attribute, `delimiter`, which indicates which character should be used to separate the CSV values. It defaults to comma (`,`) and cannot take any of the following values: double quote (`"`), carriage-return (`\r`) and new-line (`\n`). The tab (`\t`) can also not be used, the `tsv` format needs to be used instead. - -Here are some examples for the human readable formats: - -## CSV [_csv] - -```console -POST /_sql?format=csv -{ - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 5 -} -``` - -which returns: - -```text -author,name,page_count,release_date -Peter F. Hamilton,Pandora's Star,768,2004-03-02T00:00:00.000Z -Vernor Vinge,A Fire Upon the Deep,613,1992-06-01T00:00:00.000Z -Frank Herbert,Dune,604,1965-06-01T00:00:00.000Z -Alastair Reynolds,Revelation Space,585,2000-03-15T00:00:00.000Z -James S.A. 
Corey,Leviathan Wakes,561,2011-06-02T00:00:00.000Z -``` - -or: - -```console -POST /_sql?format=csv&delimiter=%3b -{ - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 5 -} -``` - -which returns: - -```text -author;name;page_count;release_date -Peter F. Hamilton;Pandora's Star;768;2004-03-02T00:00:00.000Z -Vernor Vinge;A Fire Upon the Deep;613;1992-06-01T00:00:00.000Z -Frank Herbert;Dune;604;1965-06-01T00:00:00.000Z -Alastair Reynolds;Revelation Space;585;2000-03-15T00:00:00.000Z -James S.A. Corey;Leviathan Wakes;561;2011-06-02T00:00:00.000Z -``` - - -## JSON [_json] - -```console -POST /_sql?format=json -{ - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 5 -} -``` - -Which returns: - -```console-result -{ - "columns": [ - {"name": "author", "type": "text"}, - {"name": "name", "type": "text"}, - {"name": "page_count", "type": "short"}, - {"name": "release_date", "type": "datetime"} - ], - "rows": [ - ["Peter F. Hamilton", "Pandora's Star", 768, "2004-03-02T00:00:00.000Z"], - ["Vernor Vinge", "A Fire Upon the Deep", 613, "1992-06-01T00:00:00.000Z"], - ["Frank Herbert", "Dune", 604, "1965-06-01T00:00:00.000Z"], - ["Alastair Reynolds", "Revelation Space", 585, "2000-03-15T00:00:00.000Z"], - ["James S.A. Corey", "Leviathan Wakes", 561, "2011-06-02T00:00:00.000Z"] - ], - "cursor": "sDXF1ZXJ5QW5kRmV0Y2gBAAAAAAAAAAEWWWdrRlVfSS1TbDYtcW9lc1FJNmlYdw==:BAFmBmF1dGhvcgFmBG5hbWUBZgpwYWdlX2NvdW50AWYMcmVsZWFzZV9kYXRl+v///w8=" -} -``` - - -## TSV [_tsv] - -```console -POST /_sql?format=tsv -{ - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 5 -} -``` - -Which returns: - -```text -author name page_count release_date -Peter F. Hamilton Pandora's Star 768 2004-03-02T00:00:00.000Z -Vernor Vinge A Fire Upon the Deep 613 1992-06-01T00:00:00.000Z -Frank Herbert Dune 604 1965-06-01T00:00:00.000Z -Alastair Reynolds Revelation Space 585 2000-03-15T00:00:00.000Z -James S.A. 
Corey Leviathan Wakes 561 2011-06-02T00:00:00.000Z -``` - - -## TXT [_txt] - -```console -POST /_sql?format=txt -{ - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 5 -} -``` - -Which returns: - -```text - author | name | page_count | release_date ------------------+--------------------+---------------+------------------------ -Peter F. Hamilton|Pandora's Star |768 |2004-03-02T00:00:00.000Z -Vernor Vinge |A Fire Upon the Deep|613 |1992-06-01T00:00:00.000Z -Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -Alastair Reynolds|Revelation Space |585 |2000-03-15T00:00:00.000Z -James S.A. Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z -``` - - -## YAML [_yaml] - -```console -POST /_sql?format=yaml -{ - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 5 -} -``` - -Which returns: - -```yaml -columns: -- name: "author" - type: "text" -- name: "name" - type: "text" -- name: "page_count" - type: "short" -- name: "release_date" - type: "datetime" -rows: -- - "Peter F. Hamilton" - - "Pandora's Star" - - 768 - - "2004-03-02T00:00:00.000Z" -- - "Vernor Vinge" - - "A Fire Upon the Deep" - - 613 - - "1992-06-01T00:00:00.000Z" -- - "Frank Herbert" - - "Dune" - - 604 - - "1965-06-01T00:00:00.000Z" -- - "Alastair Reynolds" - - "Revelation Space" - - 585 - - "2000-03-15T00:00:00.000Z" -- - "James S.A. 
Corey" - - "Leviathan Wakes" - - 561 - - "2011-06-02T00:00:00.000Z" -cursor: "sDXF1ZXJ5QW5kRmV0Y2gBAAAAAAAAAAEWWWdrRlVfSS1TbDYtcW9lc1FJNmlYdw==:BAFmBmF1dGhvcgFmBG5hbWUBZgpwYWdlX2NvdW50AWYMcmVsZWFzZV9kYXRl+v///w8=" -``` - - diff --git a/explore-analyze/query-filter/languages/sql-rest-overview.md b/explore-analyze/query-filter/languages/sql-rest-overview.md deleted file mode 100644 index 562cf91289..0000000000 --- a/explore-analyze/query-filter/languages/sql-rest-overview.md +++ /dev/null @@ -1,45 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-rest-overview.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Overview [sql-rest-overview] - -The [SQL search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-query) accepts SQL in a JSON document, executes it, and returns the results. For example: - -```console -POST /_sql?format=txt -{ - "query": "SELECT * FROM library ORDER BY page_count DESC LIMIT 5" -} -``` - -Which returns: - -```text - author | name | page_count | release_date ------------------+--------------------+---------------+------------------------ -Peter F. Hamilton|Pandora's Star |768 |2004-03-02T00:00:00.000Z -Vernor Vinge |A Fire Upon the Deep|613 |1992-06-01T00:00:00.000Z -Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z -Alastair Reynolds|Revelation Space |585 |2000-03-15T00:00:00.000Z -James S.A. Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z -``` - -::::{admonition} Using Kibana Console -:class: tip - -:name: sql-kibana-console - -If you are using [Kibana Console](../tools/console.md) (which is highly recommended), take advantage of the triple quotes `"""` when creating the query. 
This not only automatically escapes double quotes (`"`) inside the query string but also support multi-line as shown below: - -![console triple quotes](/explore-analyze/images/elasticsearch-reference-console-triple-quotes.png "") - -:::: - - diff --git a/explore-analyze/query-filter/languages/sql-rest-params.md b/explore-analyze/query-filter/languages/sql-rest-params.md deleted file mode 100644 index deab7f3292..0000000000 --- a/explore-analyze/query-filter/languages/sql-rest-params.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-rest-params.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Passing parameters to a query [sql-rest-params] - -Using values in a query condition, for example, or in a `HAVING` statement can be done "inline", by integrating the value in the query string itself: - -```console -POST /_sql?format=txt -{ - "query": "SELECT YEAR(release_date) AS year FROM library WHERE page_count > 300 AND author = 'Frank Herbert' GROUP BY year HAVING COUNT(*) > 0" -} -``` - -or it can be done by extracting the values in a separate list of parameters and using question mark placeholders (`?`) in the query string: - -```console -POST /_sql?format=txt -{ - "query": "SELECT YEAR(release_date) AS year FROM library WHERE page_count > ? AND author = ? GROUP BY year HAVING COUNT(*) > ?", - "params": [300, "Frank Herbert", 0] -} -``` - -::::{important} -The recommended way of passing values to a query is with question mark placeholders, to avoid any attempts of hacking or SQL injection. 
-:::: - - diff --git a/explore-analyze/query-filter/languages/sql-rest.md b/explore-analyze/query-filter/languages/sql-rest.md deleted file mode 100644 index 8692a93048..0000000000 --- a/explore-analyze/query-filter/languages/sql-rest.md +++ /dev/null @@ -1,29 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-rest.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# SQL REST API [sql-rest] - -* [Overview](sql-rest-overview.md) -* [Response Data Formats](sql-rest-format.md) -* [Paginating through a large response](sql-pagination.md) -* [Filtering using {{es}} Query DSL](sql-rest-filtering.md) -* [Columnar results](sql-rest-columnar.md) -* [Passing parameters to a query](sql-rest-params.md) -* [Use runtime fields](sql-runtime-fields.md) -* [Run an async SQL search](sql-async.md) - - - - - - - - - diff --git a/explore-analyze/query-filter/languages/sql-runtime-fields.md b/explore-analyze/query-filter/languages/sql-runtime-fields.md deleted file mode 100644 index 3dc46ea049..0000000000 --- a/explore-analyze/query-filter/languages/sql-runtime-fields.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-runtime-fields.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Use runtime fields [sql-runtime-fields] - -Use the `runtime_mappings` parameter to extract and create [runtime fields](../../../manage-data/data-store/mapping/runtime-fields.md), or columns, from existing ones during a search. - -The following search creates a `release_day_of_week` runtime field from `release_date` and returns it in the response. 
- -```console -POST _sql?format=txt -{ - "runtime_mappings": { - "release_day_of_week": { - "type": "keyword", - "script": """ - emit(doc['release_date'].value.dayOfWeekEnum.toString()) - """ - } - }, - "query": """ - SELECT * FROM library WHERE page_count > 300 AND author = 'Frank Herbert' - """ -} -``` - -The API returns: - -```txt - author | name | page_count | release_date |release_day_of_week ----------------+---------------+---------------+------------------------+------------------- -Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z|TUESDAY -``` - diff --git a/explore-analyze/query-filter/languages/sql-security.md b/explore-analyze/query-filter/languages/sql-security.md deleted file mode 100644 index d6ec7eeb15..0000000000 --- a/explore-analyze/query-filter/languages/sql-security.md +++ /dev/null @@ -1,71 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-security.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# Security [sql-security] - -Elasticsearch SQL integrates with security, if this is enabled on your cluster. In such a scenario, Elasticsearch SQL supports both security at the transport layer (by encrypting the communication between the consumer and the server) and authentication (for the access layer). - - -## SSL/TLS configuration [ssl-tls-config] - -In case of an encrypted transport, the SSL/TLS support needs to be enabled in Elasticsearch SQL to properly establish communication with {{es}}. This is done by setting the `ssl` property to `true` or by using the `https` prefix in the URL.
Depending on your SSL configuration (whether the certificates are signed by a CA or not, whether they are global at JVM level or just local to one application), might require setting up the `keystore` and/or `truststore`, that is where the *credentials* are stored (`keystore` - which typically stores private keys and certificates) and how to *verify* them (`truststore` - which typically stores certificates from third party also known as CA - certificate authorities).
Typically (and again, do note that your environment might differ significantly), if the SSL setup for Elasticsearch SQL is not already done at the JVM level, one needs to setup the keystore if the Elasticsearch SQL security requires client authentication (PKI - Public Key Infrastructure), and setup `truststore` if SSL is enabled. - - -## Authentication [_authentication] - -The authentication support in Elasticsearch SQL is of two types: - -Username/Password -: Set these through `user` and `password` properties. - -PKI/X.509 -: Use X.509 certificates to authenticate Elasticsearch SQL to {{es}}. For this, one would need to setup the `keystore` containing the private key and certificate to the appropriate user (configured in {{es}}) and the `truststore` with the CA certificate used to sign the SSL/TLS certificates in the {{es}} cluster. That is, one should setup the key to authenticate Elasticsearch SQL and also to verify that is the right one. To do so, one should set the `ssl.keystore.location` and `ssl.truststore.location` properties to indicate the `keystore` and `truststore` to use. It is recommended to have these secured through a password in which case `ssl.keystore.pass` and `ssl.truststore.pass` properties are required. - - -## Permissions (server-side) [sql-security-permissions] - -On the server, one needs to add a few permissions to users so they can run SQL. To run SQL, a user needs `read` and `indices:admin/get` permissions at minimum while some parts of the API require `cluster:monitor/main`. - -You can add permissions by [creating a role](../../../deploy-manage/users-roles/cluster-or-deployment-auth/defining-roles.md), and assigning that role to the user. Roles can be created using {{kib}}, an [API call](#sql-role-api-example) or the [`roles.yml` configuration file](#sql-role-file-example). Using {{kib}} or the role management APIs is the preferred method for defining roles. 
File-based role management is useful if you want to define a role that doesn’t need to change. You cannot use the role management APIs to view or edit a role defined in `roles.yml`. - - -### Add permissions with the role management APIs [sql-role-api-example] - -This example configures a role that can run SQL in JDBC querying the `test` index: - -```console -POST /_security/role/cli_or_drivers_minimal -{ - "cluster": ["cluster:monitor/main"], - "indices": [ - { - "names": ["test"], - "privileges": ["read", "indices:admin/get"] - } - ] -} -``` - - -### Add permissions to `roles.yml` [sql-role-file-example] - -This example configures a role that can run SQL in JDBC querying the `test` and `bort` indices. Add the following to `roles.yml`: - -```yaml -cli_or_drivers_minimal: - cluster: - - "cluster:monitor/main" - indices: - - names: test - privileges: [read, "indices:admin/get"] - - names: bort - privileges: [read, "indices:admin/get"] -``` - diff --git a/explore-analyze/query-filter/languages/sql-translate.md b/explore-analyze/query-filter/languages/sql-translate.md deleted file mode 100644 index ef0d246cfb..0000000000 --- a/explore-analyze/query-filter/languages/sql-translate.md +++ /dev/null @@ -1,60 +0,0 @@ ---- -mapped_pages: - - https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-translate.html -applies_to: - stack: ga - serverless: ga -products: - - id: elasticsearch ---- - -# SQL Translate API [sql-translate] - -The SQL Translate API accepts SQL in a JSON document and translates it into native {{es}} queries. 
For example: - -```console -POST /_sql/translate -{ - "query": "SELECT * FROM library ORDER BY page_count DESC", - "fetch_size": 10 -} -``` - -Which returns: - -```console-result -{ - "size": 10, - "_source": false, - "fields": [ - { - "field": "author" - }, - { - "field": "name" - }, - { - "field": "page_count" - }, - { - "field": "release_date", - "format": "strict_date_optional_time_nanos" - } - ], - "sort": [ - { - "page_count": { - "order": "desc", - "missing": "_first", - "unmapped_type": "short" - } - } - ], - "track_total_hits": -1 -} -``` - -Which is the request that SQL will run to provide the results. In this case, SQL will use the [scroll](elasticsearch://reference/elasticsearch/rest-apis/paginate-search-results.md#scroll-search-results) API. If the result contained an aggregation then SQL would use the normal [search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-search). - -The request body accepts the same [parameters](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-query) as the [SQL search API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-sql-query), excluding `cursor`. - diff --git a/explore-analyze/query-filter/languages/sql.md b/explore-analyze/query-filter/languages/sql.md index b050b0c954..30c4f80895 100644 --- a/explore-analyze/query-filter/languages/sql.md +++ b/explore-analyze/query-filter/languages/sql.md @@ -1,6 +1,7 @@ --- mapped_pages: - https://www.elastic.co/guide/en/elasticsearch/reference/current/xpack-sql.html +navigation_title: SQL applies_to: stack: ga serverless: ga @@ -8,40 +9,60 @@ products: - id: elasticsearch --- -# SQL [xpack-sql] +# SQL overview [sql-overview] -{{es}} includes a SQL feature to execute SQL queries against indices and return results in tabular format. +Elasticsearch SQL aims to provide a powerful yet lightweight SQL interface to {{es}}. -The following chapters aim to cover everything from usage, to syntax and drivers. 
Experienced users might want to jump directly to the list of SQL [commands](elasticsearch://reference/query-languages/sql/sql-commands.md) and [functions](elasticsearch://reference/query-languages/sql/sql-functions.md). +## What's SQL in {{es}}? [sql-introduction] -[Overview](sql-overview.md) +Elasticsearch SQL is a feature that allows SQL-like queries to be executed in real-time against {{es}}. Whether using the REST interface, command-line or JDBC, any client can use SQL to search and aggregate data *natively* inside {{es}}. One can think of Elasticsearch SQL as a *translator*, one that understands both SQL and {{es}} and makes it easy to read and process data in real-time, at scale by leveraging {{es}} capabilities. + +## Why Elasticsearch SQL ? [sql-why] + +Native integration +: Elasticsearch SQL is built from the ground up for {{es}}. Each and every query is efficiently executed against the relevant nodes according to the underlying storage. + +No external parts +: No need for additional hardware, processes, runtimes or libraries to query {{es}}; Elasticsearch SQL eliminates extra moving parts by running *inside* the {{es}} cluster. + +Lightweight and efficient +: Elasticsearch SQL does not abstract {{es}} and its search capabilities - on the contrary, it embraces and exposes SQL to allow proper full-text search, in real-time, in the same declarative, succinct fashion. + + +## Reference documentation + +:::{note} + This overview page is in the Explore & Analyze section. All of the {{es}} SQL documentation lives in the **Reference** section. +::: + +[Overview](elasticsearch://reference/query-languages/sql.md) : Overview of Elasticsearch SQL and its features. -[Getting Started](sql-getting-started.md) +[Getting Started](elasticsearch://reference/query-languages/sql/sql-getting-started.md) : Start using SQL right away in {{es}}. 
-[Concepts and Terminology](sql-concepts.md) +[Concepts and Terminology](elasticsearch://reference/query-languages/sql/sql-concepts.md) : Language conventions across SQL and {{es}}. -[Security](sql-security.md) +[Security](elasticsearch://reference/query-languages/sql/sql-security.md) : Secure Elasticsearch SQL and {{es}}. -[REST API](sql-rest.md) +[REST API](elasticsearch://reference/query-languages/sql/sql-rest.md) : Execute SQL in JSON format over REST. -[Translate API](sql-translate.md) +[Translate API](elasticsearch://reference/query-languages/sql/sql-translate.md) : Translate SQL in JSON format to {{es}} native query. -[CLI](sql-cli.md) +[CLI](elasticsearch://reference/query-languages/sql/sql-cli.md) : Command-line application for executing SQL against {{es}}. -[JDBC](sql-jdbc.md) +[JDBC](elasticsearch://reference/query-languages/sql/sql-jdbc.md) : JDBC driver for {{es}}. -[ODBC](sql-odbc.md) +[ODBC](elasticsearch://reference/query-languages/sql/sql-odbc.md) : ODBC driver for {{es}}. -[Client Applications](sql-client-apps.md) +[Client Applications](elasticsearch://reference/query-languages/sql/sql-client-apps.md) : Setup various SQL/BI tools with Elasticsearch SQL. [SQL Language](elasticsearch://reference/query-languages/sql/sql-spec.md) diff --git a/explore-analyze/report-and-share.md b/explore-analyze/report-and-share.md index 25509ade3b..71e894cb26 100644 --- a/explore-analyze/report-and-share.md +++ b/explore-analyze/report-and-share.md @@ -129,7 +129,7 @@ We recommend using CSV reports to export moderate amounts of data only. The feat To work around the limitations, use filters to create multiple smaller reports, or extract the data you need directly with the Elasticsearch APIs. 
-For more information on using Elasticsearch APIs directly, see [Scroll API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-scroll), [Point in time API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-open-point-in-time), [ES|QL](elasticsearch://reference/query-languages/esql/esql-rest.md) or [SQL](/explore-analyze/query-filter/languages/sql-rest-format.md#_csv) with CSV response data format. We recommend that you use an official Elastic language client: details for each programming language library that Elastic provides are in the [{{es}} Client documentation](/reference/elasticsearch-clients/index.md). +For more information on using Elasticsearch APIs directly, see [Scroll API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-scroll), [Point in time API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-open-point-in-time), [ES|QL](elasticsearch://reference/query-languages/esql/esql-rest.md) or [SQL](elasticsearch://reference/query-languages/sql/sql-rest-format.md#_csv) with CSV response data format. We recommend that you use an official Elastic language client: details for each programming language library that Elastic provides are in the [{{es}} Client documentation](/reference/elasticsearch-clients/index.md). [Reporting parameters](kibana://reference/configuration-reference/reporting-settings.md) can be adjusted to overcome some of these limiting scenarios. Results are dependent on data size, availability, and latency factors and are not guaranteed. diff --git a/explore-analyze/report-and-share/reporting-troubleshooting-csv.md b/explore-analyze/report-and-share/reporting-troubleshooting-csv.md index 89dd5d135d..69bcafed57 100644 --- a/explore-analyze/report-and-share/reporting-troubleshooting-csv.md +++ b/explore-analyze/report-and-share/reporting-troubleshooting-csv.md @@ -26,7 +26,7 @@ We recommend using CSV reports to export moderate amounts of data only. 
The feat To work around the limitations, use filters to create multiple smaller reports, or extract the data you need directly with the Elasticsearch APIs. -For more information on using Elasticsearch APIs directly, see [Scroll API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-scroll), [Point in time API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-open-point-in-time), [ES|QL](elasticsearch://reference/query-languages/esql/esql-rest.md) or [SQL](../query-filter/languages/sql-rest-format.md#_csv) with CSV response data format. We recommend that you use an official Elastic language client: details for each programming language library that Elastic provides are in the [{{es}} Client documentation](/reference/elasticsearch-clients/index.md). +For more information on using Elasticsearch APIs directly, see [Scroll API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-scroll), [Point in time API](https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-open-point-in-time), [ES|QL](elasticsearch://reference/query-languages/esql/esql-rest.md) or [SQL](elasticsearch://reference/query-languages/sql/sql-rest-format.md#_csv) with CSV response data format. We recommend that you use an official Elastic language client: details for each programming language library that Elastic provides are in the [{{es}} Client documentation](/reference/elasticsearch-clients/index.md). [Reporting parameters](kibana://reference/configuration-reference/reporting-settings.md) can be adjusted to overcome some of these limiting scenarios. Results are dependent on data size, availability, and latency factors and are not guaranteed. 
diff --git a/explore-analyze/toc.yml b/explore-analyze/toc.yml index 690c214ef0..794b0b5dff 100644 --- a/explore-analyze/toc.yml +++ b/explore-analyze/toc.yml @@ -8,43 +8,6 @@ toc: - file: query-filter/languages/querydsl.md - file: query-filter/languages/esql-kibana.md - file: query-filter/languages/sql.md - children: - - file: query-filter/languages/sql-overview.md - - file: query-filter/languages/sql-getting-started.md - - file: query-filter/languages/sql-concepts.md - - file: query-filter/languages/sql-security.md - - file: query-filter/languages/sql-rest.md - children: - - file: query-filter/languages/sql-rest-overview.md - - file: query-filter/languages/sql-rest-format.md - - file: query-filter/languages/sql-pagination.md - - file: query-filter/languages/sql-rest-filtering.md - - file: query-filter/languages/sql-rest-columnar.md - - file: query-filter/languages/sql-rest-params.md - - file: query-filter/languages/sql-runtime-fields.md - - file: query-filter/languages/sql-async.md - - file: query-filter/languages/sql-translate.md - - file: query-filter/languages/sql-cli.md - - file: query-filter/languages/sql-jdbc.md - children: - - file: query-filter/languages/sql-jdbc-api-usage.md - - file: query-filter/languages/sql-odbc.md - children: - - file: query-filter/languages/sql-odbc-installation.md - - file: query-filter/languages/sql-odbc-setup.md - - file: query-filter/languages/sql-client-apps.md - children: - - file: query-filter/languages/sql-client-apps-dbeaver.md - - file: query-filter/languages/sql-client-apps-dbvis.md - - file: query-filter/languages/sql-client-apps-excel.md - - file: query-filter/languages/sql-client-apps-powerbi.md - - file: query-filter/languages/sql-client-apps-ps1.md - - file: query-filter/languages/sql-client-apps-microstrat.md - - file: query-filter/languages/sql-client-apps-qlik.md - - file: query-filter/languages/sql-client-apps-squirrel.md - - file: query-filter/languages/sql-client-apps-workbench.md - - file: 
query-filter/languages/sql-client-apps-tableau-desktop.md - - file: query-filter/languages/sql-client-apps-tableau-server.md - file: query-filter/languages/eql.md children: - file: query-filter/languages/example-detect-threats-with-eql.md diff --git a/redirects.yml b/redirects.yml index 33c9971abc..df7feba803 100644 --- a/redirects.yml +++ b/redirects.yml @@ -467,4 +467,44 @@ redirects: 'solutions/observability/apm/jaeger.md': 'solutions/observability/apm/ingest/jaeger.md' 'solutions/observability/apm/monitor-aws-lambda-functions.md': 'solutions/observability/apm/ingest/monitor-aws-lambda-functions.md' # Related to https://github.com/elastic/docs-content/pull/3142 - 'deploy-manage/deploy/self-managed/networkaddress-cache-ttl.md': 'deploy-manage/deploy/self-managed/important-settings-configuration.md' \ No newline at end of file + 'deploy-manage/deploy/self-managed/networkaddress-cache-ttl.md': 'deploy-manage/deploy/self-managed/important-settings-configuration.md' + +# Move all content for SQL under Explore & Analyze -> SQL to Reference content: related to https://github.com/elastic/docs-content/pull/3270 + 'explore-analyze/query-filter/languages/sql-overview.md': 'elasticsearch://reference/query-languages/sql/sql.md' + 'explore-analyze/query-filter/languages/sql-getting-started.md': 'elasticsearch://reference/query-languages/sql/sql-getting-started.md' + 'explore-analyze/query-filter/languages/sql-concepts.md': 'elasticsearch://reference/query-languages/sql/sql-concepts.md' + 'explore-analyze/query-filter/languages/sql-security.md': 'elasticsearch://reference/query-languages/sql/sql-security.md' + 'explore-analyze/query-filter/languages/sql-rest.md': 'elasticsearch://reference/query-languages/sql/sql-rest.md' + 'explore-analyze/query-filter/languages/sql-rest-overview.md': 'elasticsearch://reference/query-languages/sql/sql-rest-overview.md' + 'explore-analyze/query-filter/languages/sql-rest-format.md': 'elasticsearch://reference/query-languages/sql/sql-rest-format.md' + 
'explore-analyze/query-filter/languages/sql-pagination.md': 'elasticsearch://reference/query-languages/sql/sql-pagination.md' + 'explore-analyze/query-filter/languages/sql-rest-filtering.md': 'elasticsearch://reference/query-languages/sql/sql-rest-filtering.md' + 'explore-analyze/query-filter/languages/sql-rest-columnar.md': 'elasticsearch://reference/query-languages/sql/sql-rest-columnar.md' + 'explore-analyze/query-filter/languages/sql-rest-params.md': 'elasticsearch://reference/query-languages/sql/sql-rest-params.md' + 'explore-analyze/query-filter/languages/sql-runtime-fields.md': 'elasticsearch://reference/query-languages/sql/sql-runtime-fields.md' + 'explore-analyze/query-filter/languages/sql-async.md': 'elasticsearch://reference/query-languages/sql/sql-async.md' + 'explore-analyze/query-filter/languages/sql-translate.md': 'elasticsearch://reference/query-languages/sql/sql-translate.md' + 'explore-analyze/query-filter/languages/sql-cli.md': 'elasticsearch://reference/query-languages/sql/sql-cli.md' + 'explore-analyze/query-filter/languages/sql-jdbc.md': 'elasticsearch://reference/query-languages/sql/sql-jdbc.md' + 'explore-analyze/query-filter/languages/sql-jdbc-api-usage.md': 'elasticsearch://reference/query-languages/sql/sql-jdbc-api-usage.md' + 'explore-analyze/query-filter/languages/sql-odbc.md': 'elasticsearch://reference/query-languages/sql/sql-odbc.md' + 'explore-analyze/query-filter/languages/sql-odbc-installation.md': 'elasticsearch://reference/query-languages/sql/sql-odbc-installation.md' + 'explore-analyze/query-filter/languages/sql-odbc-setup.md': 'elasticsearch://reference/query-languages/sql/sql-odbc-setup.md' + 'explore-analyze/query-filter/languages/sql-client-apps.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps.md' + 'explore-analyze/query-filter/languages/sql-client-apps-dbeaver.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-dbeaver.md' + 
'explore-analyze/query-filter/languages/sql-client-apps-dbvis.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-dbvis.md' + 'explore-analyze/query-filter/languages/sql-client-apps-excel.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-excel.md' + 'explore-analyze/query-filter/languages/sql-client-apps-powerbi.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-powerbi.md' + 'explore-analyze/query-filter/languages/sql-client-apps-ps1.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-ps1.md' + 'explore-analyze/query-filter/languages/sql-client-apps-microstrat.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-microstrat.md' + 'explore-analyze/query-filter/languages/sql-client-apps-qlik.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-qlik.md' + 'explore-analyze/query-filter/languages/sql-client-apps-squirrel.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-squirrel.md' + 'explore-analyze/query-filter/languages/sql-client-apps-workbench.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-workbench.md' + 'explore-analyze/query-filter/languages/sql-client-apps-tableau-desktop.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-tableau-desktop.md' + 'explore-analyze/query-filter/languages/sql-client-apps-tableau-server.md': 'elasticsearch://reference/query-languages/sql/sql-client-apps-tableau-server.md' + + + + + +