From 5ba036b5fe06063e043fcbbfe569fc378992d2ae Mon Sep 17 00:00:00 2001
From: crflynn <crflynn@users.noreply.github.com>
Date: Thu, 13 Sep 2018 22:09:23 -0400
Subject: [PATCH] initial commit

---
 .gitignore                    |   7 +
 HISTORY.rst                   |   7 +
 LICENSE.txt                   |  21 +++
 MANIFEST.in                   |   3 +
 Pipfile                       |  15 +++
 Pipfile.lock                  | 239 ++++++++++++++++++++++++++++++++++
 README.rst                    | 131 +++++++++++++++++++
 databricks_api/__init__.py    |   1 +
 databricks_api/__version__.py |   9 ++
 databricks_api/databricks.py  |  23 ++++
 generate_docs.py              |  84 ++++++++++++
 setup.cfg                     |   2 +
 setup.py                      |  39 ++++++
 13 files changed, 581 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 HISTORY.rst
 create mode 100644 LICENSE.txt
 create mode 100644 MANIFEST.in
 create mode 100644 Pipfile
 create mode 100644 Pipfile.lock
 create mode 100644 README.rst
 create mode 100644 databricks_api/__init__.py
 create mode 100644 databricks_api/__version__.py
 create mode 100644 databricks_api/databricks.py
 create mode 100644 generate_docs.py
 create mode 100644 setup.cfg
 create mode 100644 setup.py

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..20d166e
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,7 @@
+*.sh
+.DS_Store
+.idea/
+__pycache__
+build/
+dist/
+*.egg-info
diff --git a/HISTORY.rst b/HISTORY.rst
new file mode 100644
index 0000000..43f5cdb
--- /dev/null
+++ b/HISTORY.rst
@@ -0,0 +1,7 @@
+Release History
+---------------
+
+0.1.0: 2018-09-13
+~~~~~~~~~~~~~~~~~
+
+* First release
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..9e8efda
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 Christopher Flynn
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..a33decb
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,3 @@
+include README.rst
+include LICENSE.txt
+include HISTORY.rst
diff --git a/Pipfile b/Pipfile
new file mode 100644
index 0000000..bcafad3
--- /dev/null
+++ b/Pipfile
@@ -0,0 +1,15 @@
+[[source]]
+
+url = "https://pypi.python.org/simple"
+verify_ssl = true
+name = "pypi"
+
+
+[packages]
+
+databricks-cli = "*"
+
+
+[dev-packages]
+
+sphinx = "*"
diff --git a/Pipfile.lock b/Pipfile.lock
new file mode 100644
index 0000000..41ee01e
--- /dev/null
+++ b/Pipfile.lock
@@ -0,0 +1,239 @@
+{
+    "_meta": {
+        "hash": {
+            "sha256": "4305365f46e2004285ff42ff840b007425489e24240533622b692799ae8151e1"
+        },
+        "host-environment-markers": {
+            "implementation_name": "cpython",
+            "implementation_version": "3.6.5",
+            "os_name": "posix",
+            "platform_machine": "x86_64",
+            "platform_python_implementation": "CPython",
+            "platform_release": "17.3.0",
+            "platform_system": "Darwin",
+            "platform_version": "Darwin Kernel Version 17.3.0: Thu Nov 9 18:09:22 PST 2017; root:xnu-4570.31.3~1/RELEASE_X86_64",
+            "python_full_version": "3.6.5",
+            "python_version": "3.6",
+            "sys_platform": "darwin"
+        },
+        "pipfile-spec": 6,
+        "requires": {},
+        "sources": [
+            {
+                "name": "pypi",
+                "url": "https://pypi.python.org/simple",
+                "verify_ssl": true
+            }
+        ]
+    },
+    "default": {
+        "certifi": {
+            "hashes": [
+                "sha256:456048c7e371c089d0a77a5212fb37a2c2dce1e24146e3b7e0261736aaeaa22a",
+                "sha256:376690d6f16d32f9d1fe8932551d80b23e9d393a8578c5633a2ed39a64861638"
+            ],
+            "version": "==2018.8.24"
+        },
+        "chardet": {
+            "hashes": [
+                "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691",
+                "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
+            ],
+            "version": "==3.0.4"
+        },
+        "click": {
+            "hashes": [
+                "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d",
+                "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"
+            ],
+            "version": "==6.7"
+        },
+        "configparser": {
+            "hashes": [
+                "sha256:5308b47021bc2340965c371f0f058cc6971a04502638d4244225c49d80db273a"
+            ],
+            "version": "==3.5.0"
+        },
+        "databricks-cli": {
+            "hashes": [
+                "sha256:c1c0b29497d3cd203615eba83afc9fbca3688d545cf3b6fcef4199f9b83b3036",
+                "sha256:9e5b8c06834d44dd7e986f6fc4788529619aa439ffb664ec6bfb255b979a6b17"
+            ],
+            "version": "==0.8.2"
+        },
+        "idna": {
+            "hashes": [
+                "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e",
+                "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16"
+            ],
+            "version": "==2.7"
+        },
+        "requests": {
+            "hashes": [
+                "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1",
+                "sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a"
+            ],
+            "version": "==2.19.1"
+        },
+        "six": {
+            "hashes": [
+                "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb",
+                "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
+            ],
+            "version": "==1.11.0"
+        },
+        "tabulate": {
+            "hashes": [
+                "sha256:e4ca13f26d0a6be2a2915428dc21e732f1e44dad7f76d7030b2ef1ec251cf7f2"
+            ],
+            "version": "==0.8.2"
+        },
+        "urllib3": {
+            "hashes": [
+                "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5",
+                "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf"
+            ],
+            "version": "==1.23"
+        }
+    },
+    "develop": {
+        "alabaster": {
+            "hashes": [
+                "sha256:674bb3bab080f598371f4443c5008cbfeb1a5e622dd312395d2d82af2c54c456",
+                "sha256:b63b1f4dc77c074d386752ec4a8a7517600f6c0db8cd42980cae17ab7b3275d7"
+            ],
+            "version": "==0.7.11"
+        },
+        "babel": {
+            "hashes": [
+                "sha256:6778d85147d5d85345c14a26aada5e478ab04e39b078b0745ee6870c2b5cf669",
+                "sha256:8cba50f48c529ca3fa18cf81fa9403be176d374ac4d60738b839122dfaaa3d23"
+            ],
+            "version": "==2.6.0"
+        },
+        "certifi": {
+            "hashes": [
+                "sha256:456048c7e371c089d0a77a5212fb37a2c2dce1e24146e3b7e0261736aaeaa22a",
+                "sha256:376690d6f16d32f9d1fe8932551d80b23e9d393a8578c5633a2ed39a64861638"
+            ],
+            "version": "==2018.8.24"
+        },
+        "chardet": {
+            "hashes": [
+                "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691",
+                "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
+            ],
+            "version": "==3.0.4"
+        },
+        "docutils": {
+            "hashes": [
+                "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6",
+                "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6",
+                "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274"
+            ],
+            "version": "==0.14"
+        },
+        "idna": {
+            "hashes": [
+                "sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e",
+                "sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16"
+            ],
+            "version": "==2.7"
+        },
+        "imagesize": {
+            "hashes": [
+                "sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8",
+                "sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5"
+            ],
+            "version": "==1.1.0"
+        },
+        "jinja2": {
+            "hashes": [
+                "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd",
+                "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"
+            ],
+            "version": "==2.10"
+        },
+        "markupsafe": {
+            "hashes": [
+                "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665"
+            ],
+            "version": "==1.0"
+        },
+        "packaging": {
+            "hashes": [
+                "sha256:e9215d2d2535d3ae866c3d6efc77d5b24a0192cce0ff20e42896cc0664f889c0",
+                "sha256:f019b770dd64e585a99714f1fd5e01c7a8f11b45635aa953fd41c689a657375b"
+            ],
+            "version": "==17.1"
+        },
+        "pygments": {
+            "hashes": [
+                "sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d",
+                "sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc"
+            ],
+            "version": "==2.2.0"
+        },
+        "pyparsing": {
+            "hashes": [
+                "sha256:fee43f17a9c4087e7ed1605bd6df994c6173c1e977d7ade7b651292fab2bd010",
+                "sha256:0832bcf47acd283788593e7a0f542407bd9550a55a8a8435214a1960e04bcb04",
+                "sha256:9e8143a3e15c13713506886badd96ca4b579a87fbdf49e550dbfc057d6cb218e",
+                "sha256:281683241b25fe9b80ec9d66017485f6deff1af5cde372469134b56ca8447a07",
+                "sha256:b8b3117ed9bdf45e14dcc89345ce638ec7e0e29b2b579fa1ecf32ce45ebac8a5",
+                "sha256:8f1e18d3fd36c6795bb7e02a39fd05c611ffc2596c1e0d995d34d67630426c18",
+                "sha256:e4d45427c6e20a59bf4f88c639dcc03ce30d193112047f94012102f235853a58"
+            ],
+            "version": "==2.2.0"
+        },
+        "pytz": {
+            "hashes": [
+                "sha256:a061aa0a9e06881eb8b3b2b43f05b9439d6583c206d0a6c340ff72a7b6669053",
+                "sha256:ffb9ef1de172603304d9d2819af6f5ece76f2e85ec10692a524dd876e72bf277"
+            ],
+            "version": "==2018.5"
+        },
+        "requests": {
+            "hashes": [
+                "sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1",
+                "sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a"
+            ],
+            "version": "==2.19.1"
+        },
+        "six": {
+            "hashes": [
+                "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb",
+                "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
+            ],
+            "version": "==1.11.0"
+        },
+        "snowballstemmer": {
+            "hashes": [
"sha256:9f3bcd3c401c3e862ec0ebe6d2c069ebc012ce142cce209c098ccb5b09136e89", + "sha256:919f26a68b2c17a7634da993d91339e288964f93c274f1343e3bbbe2096e1128" + ], + "version": "==1.2.1" + }, + "sphinx": { + "hashes": [ + "sha256:c091dbdd5cc5aac6eb95d591a819fd18bccec90ffb048ec465b165a48b839b45", + "sha256:95acd6648902333647a0e0564abdb28a74b0a76d2333148aa35e5ed1f56d3c4b" + ], + "version": "==1.8.0" + }, + "sphinxcontrib-websupport": { + "hashes": [ + "sha256:68ca7ff70785cbe1e7bccc71a48b5b6d965d79ca50629606c7861a21b206d9dd", + "sha256:9de47f375baf1ea07cdb3436ff39d7a9c76042c10a769c52353ec46e4e8fc3b9" + ], + "version": "==1.1.0" + }, + "urllib3": { + "hashes": [ + "sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5", + "sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf" + ], + "version": "==1.23" + } + } +} diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..983fd7b --- /dev/null +++ b/README.rst @@ -0,0 +1,131 @@ +databricks-api +============== + +*[This documentation is auto-generated]* + +This package provides a simplified interface for the Databricks REST API. +The interface is autogenerated on instantiation using the underlying client +library used in the official `databricks-cli` python package. + +The docs here describe the interface for version **0.8.2** of +the `databricks-cli` package for API version **2.0**. +Assuming there are no major changes to the `databricks-cli` package +structure, this package should continue to work without a required update. + +The `databricks-api` package contains a `DatabricksAPI` class which provides +instance attributes for the `databricks-cli` ApiClient, as well as each of +the available service instances. The attributes of a `DatabricksAPI` instance are: + +* DatabricksAPI.client ** +* DatabricksAPI.jobs ** +* DatabricksAPI.cluster ** +* DatabricksAPI.managed_library ** +* DatabricksAPI.dbfs ** +* DatabricksAPI.workspace ** +* DatabricksAPI.secret ** +* DatabricksAPI.groups ** + +To instantiate the client, provide the databricks host and either a token or +user and password. Also shown is the full signature of the +underlying `ApiClient.__init__` + +.. code-block:: python + + from databricks_api import DatabricksAPI + + // Provide a host and token + db = DatabricksAPI(host="example.cloud.databricks.com", token="dpapi123...") + + // OR a host and user and password + db = DatabricksAPI(host="example.cloud.databricks.com", user="me@example.com", password="password") + + // Full __init__ signature + db = DatabricksAPI(user=None, password=None, host=None, token=None, apiVersion=2.0, default_headers={}, verify=True, command_name='') + +Refer to the `official documentation `_ +on the functionality and required arguments of each method below. 
+
+Refer to the `official documentation <https://docs.databricks.com/api/latest/index.html>`_
+on the functionality and required arguments of each method below.
+
+Each of the service instance attributes provides the following public methods:
+
+**DatabricksAPI.jobs**
+
+* DatabricksAPI.jobs.cancel_run(run_id)
+* DatabricksAPI.jobs.create_job(name=None, existing_cluster_id=None, new_cluster=None, libraries=None, email_notifications=None, timeout_seconds=None, max_retries=None, min_retry_interval_millis=None, retry_on_timeout=None, schedule=None, notebook_task=None, spark_jar_task=None, spark_python_task=None, spark_submit_task=None, max_concurrent_runs=None)
+* DatabricksAPI.jobs.delete_job(job_id)
+* DatabricksAPI.jobs.delete_run(run_id=None)
+* DatabricksAPI.jobs.export_run(run_id, views_to_export=None)
+* DatabricksAPI.jobs.get_job(job_id)
+* DatabricksAPI.jobs.get_run(run_id=None)
+* DatabricksAPI.jobs.get_run_output(run_id)
+* DatabricksAPI.jobs.list_jobs()
+* DatabricksAPI.jobs.list_runs(job_id=None, active_only=None, completed_only=None, offset=None, limit=None)
+* DatabricksAPI.jobs.reset_job(job_id, new_settings)
+* DatabricksAPI.jobs.run_now(job_id=None, jar_params=None, notebook_params=None, python_params=None, spark_submit_params=None)
+* DatabricksAPI.jobs.submit_run(run_name=None, existing_cluster_id=None, new_cluster=None, libraries=None, notebook_task=None, spark_jar_task=None, spark_python_task=None, spark_submit_task=None, timeout_seconds=None)
+
+**DatabricksAPI.cluster**
+
+* DatabricksAPI.cluster.create_cluster(num_workers=None, autoscale=None, cluster_name=None, spark_version=None, spark_conf=None, aws_attributes=None, node_type_id=None, driver_node_type_id=None, ssh_public_keys=None, custom_tags=None, cluster_log_conf=None, spark_env_vars=None, autotermination_minutes=None, enable_elastic_disk=None, cluster_source=None)
+* DatabricksAPI.cluster.delete_cluster(cluster_id)
+* DatabricksAPI.cluster.edit_cluster(cluster_id, num_workers=None, autoscale=None, cluster_name=None, spark_version=None, spark_conf=None, aws_attributes=None, node_type_id=None, driver_node_type_id=None, ssh_public_keys=None, custom_tags=None, cluster_log_conf=None, spark_env_vars=None, autotermination_minutes=None, enable_elastic_disk=None, cluster_source=None)
+* DatabricksAPI.cluster.get_cluster(cluster_id)
+* DatabricksAPI.cluster.list_available_zones()
+* DatabricksAPI.cluster.list_clusters()
+* DatabricksAPI.cluster.list_node_types()
+* DatabricksAPI.cluster.list_spark_versions()
+* DatabricksAPI.cluster.resize_cluster(cluster_id, num_workers=None, autoscale=None)
+* DatabricksAPI.cluster.restart_cluster(cluster_id)
+* DatabricksAPI.cluster.start_cluster(cluster_id)
+
+**DatabricksAPI.managed_library**
+
+* DatabricksAPI.managed_library.all_cluster_statuses()
+* DatabricksAPI.managed_library.cluster_status(cluster_id)
+* DatabricksAPI.managed_library.install_libraries(cluster_id, libraries=None)
+* DatabricksAPI.managed_library.uninstall_libraries(cluster_id, libraries=None)
+
+**DatabricksAPI.dbfs**
+
+* DatabricksAPI.dbfs.add_block(handle, data)
+* DatabricksAPI.dbfs.close(handle)
+* DatabricksAPI.dbfs.create(path, overwrite=None)
+* DatabricksAPI.dbfs.delete(path, recursive=None)
+* DatabricksAPI.dbfs.get_status(path)
+* DatabricksAPI.dbfs.list(path)
+* DatabricksAPI.dbfs.mkdirs(path)
+* DatabricksAPI.dbfs.move(source_path, destination_path)
+* DatabricksAPI.dbfs.put(path, contents=None, overwrite=None)
+* DatabricksAPI.dbfs.read(path, offset=None, length=None)
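+
+As an example of how the handle-based methods above compose, here is a
+sketch of streaming a larger file into DBFS (the local and DBFS paths are
+hypothetical; the REST API expects each block base64-encoded):
+
+.. code-block:: python
+
+    import base64
+
+    # Open a write handle, push 1 MB blocks, then close the handle
+    handle = db.dbfs.create("/tmp/upload.bin", overwrite=True)["handle"]
+    with open("local.bin", "rb") as f:
+        while True:
+            block = f.read(1 << 20)
+            if not block:
+                break
+            db.dbfs.add_block(handle, base64.b64encode(block).decode("utf-8"))
+    db.dbfs.close(handle)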
+
+**DatabricksAPI.workspace**
+
+* DatabricksAPI.workspace.delete(path, recursive=None)
+* DatabricksAPI.workspace.export_workspace(path, format=None, direct_download=None)
+* DatabricksAPI.workspace.get_status(path)
+* DatabricksAPI.workspace.import_workspace(path, format=None, language=None, content=None, overwrite=None)
+* DatabricksAPI.workspace.list(path)
+* DatabricksAPI.workspace.mkdirs(path)
+
+**DatabricksAPI.secret**
+
+* DatabricksAPI.secret.create_scope(scope, initial_manage_principal=None)
+* DatabricksAPI.secret.delete_acl(scope, principal)
+* DatabricksAPI.secret.delete_scope(scope)
+* DatabricksAPI.secret.delete_secret(scope, key)
+* DatabricksAPI.secret.get_acl(scope, principal)
+* DatabricksAPI.secret.list_acls(scope)
+* DatabricksAPI.secret.list_scopes()
+* DatabricksAPI.secret.list_secrets(scope)
+* DatabricksAPI.secret.put_acl(scope, principal, permission)
+* DatabricksAPI.secret.put_secret(scope, key, string_value=None, bytes_value=None)
+
+**DatabricksAPI.groups**
+
+* DatabricksAPI.groups.add_to_group(parent_name, user_name=None, group_name=None)
+* DatabricksAPI.groups.create_group(group_name)
+* DatabricksAPI.groups.get_group_members(group_name)
+* DatabricksAPI.groups.get_groups()
+* DatabricksAPI.groups.get_groups_for_principal(user_name=None, group_name=None)
+* DatabricksAPI.groups.remove_from_group(parent_name, user_name=None, group_name=None)
+* DatabricksAPI.groups.remove_group(group_name)
+
diff --git a/databricks_api/__init__.py b/databricks_api/__init__.py
new file mode 100644
index 0000000..f2e7c06
--- /dev/null
+++ b/databricks_api/__init__.py
@@ -0,0 +1 @@
+from .databricks import DatabricksAPI
diff --git a/databricks_api/__version__.py b/databricks_api/__version__.py
new file mode 100644
index 0000000..24b5746
--- /dev/null
+++ b/databricks_api/__version__.py
@@ -0,0 +1,9 @@
+"""Version information."""
+__title__ = "databricks-api"
+__description__ = "Databricks API client interface."
+__url__ = "https://github.com/crflynn/databricks-api"
+__version__ = "0.1.0"
+__author__ = "Christopher Flynn"
+__author_email__ = "crflynn@users.noreply.github.com"
+__license__ = "MIT"
+__copyright__ = "Copyright 2018 Christopher Flynn"
diff --git a/databricks_api/databricks.py b/databricks_api/databricks.py
new file mode 100644
index 0000000..c762639
--- /dev/null
+++ b/databricks_api/databricks.py
@@ -0,0 +1,23 @@
+import re
+
+from databricks_cli.sdk import ApiClient
+import databricks_cli.sdk.service as services
+
+
+def camel_to_snake(name):
+    s = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
+    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s).lower()
+
+
+class DatabricksAPI(object):
+    def __init__(self, **kwargs):
+        if "host" in kwargs:
+            if not kwargs["host"].startswith("https://"):
+                kwargs["host"] = "https://" + kwargs["host"]
+
+        self.client = ApiClient(**kwargs)
+
+        for service_name, service in services.__dict__.items():
+            if "Service" in service_name:
+                name = camel_to_snake(service_name[:-7])
+                setattr(self, name, service(self.client))
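As a worked illustration of the attribute names constructed in
`databricks.py` above (a sketch; the `*Service` classes live in
`databricks_cli.sdk.service`), the trailing `"Service"` (7 characters) is
sliced off and the remainder is converted to snake_case:

.. code-block:: python

    from databricks_api.databricks import camel_to_snake

    camel_to_snake("JobsService"[:-7])            # -> "jobs"
    camel_to_snake("DbfsService"[:-7])            # -> "dbfs"
    camel_to_snake("ManagedLibraryService"[:-7])  # -> "managed_library"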
diff --git a/generate_docs.py b/generate_docs.py
new file mode 100644
index 0000000..b8bb3a4
--- /dev/null
+++ b/generate_docs.py
@@ -0,0 +1,84 @@
+import inspect
+
+from databricks_api import DatabricksAPI
+import databricks_cli
+
+
+db = DatabricksAPI(host="localhost", token="token")
+
+
+intro = """databricks-api
+==============
+
+*[This documentation is auto-generated]*
+
+This package provides a simplified interface for the Databricks REST API.
+The interface is autogenerated on instantiation using the underlying client
+library used in the official `databricks-cli` python package.
+
+The docs here describe the interface for version **{version}** of
+the `databricks-cli` package for API version **{api_version}**.
+Assuming there are no major changes to the `databricks-cli` package
+structure, this package should continue to work without a required update.
+
+The `databricks-api` package contains a `DatabricksAPI` class which provides
+instance attributes for the `databricks-cli` ApiClient, as well as each of
+the available service instances. The attributes of a `DatabricksAPI` instance are:
+
+""".format(
+    version=databricks_cli.version.version,
+    api_version=databricks_cli.sdk.version.API_VERSION,
+)
+
+attrs = []
+
+for k, v in db.__dict__.items():
+    attrs.append("* DatabricksAPI." + k + " *<" + v.__class__.__module__ + "." + v.__class__.__name__ + ">*\n")
+
+middle = """
+To instantiate the client, provide the databricks host and either a token or
+user and password. Also shown is the full signature of the
+underlying `ApiClient.__init__`:
+
+.. code-block:: python
+
+    from databricks_api import DatabricksAPI
+
+    # Provide a host and token
+    db = DatabricksAPI(host="example.cloud.databricks.com", token="dapi123...")
+
+    # OR a host and user and password
+    db = DatabricksAPI(host="example.cloud.databricks.com", user="me@example.com", password="password")
+
+    # Full __init__ signature
+    {instantiate}
+
+Refer to the `official documentation <https://docs.databricks.com/api/latest/index.html>`_
+on the functionality and required arguments of each method below.
+
+Each of the service instance attributes provides the following public methods:
+
+""".format(instantiate="db = DatabricksAPI" + str(inspect.signature(databricks_cli.sdk.ApiClient)))
+
+services = []
+for k, v in db.__dict__.items():
+    if k == "client":
+        continue
+    print(k, v)
+    services.append("**DatabricksAPI." + k + "**\n\n")
+    methods = inspect.getmembers(v, predicate=inspect.ismethod)
+    print(methods)
+    for method in methods:
+        print(method)
+        if not method[0].startswith("_"):
+            services.append("* " + "DatabricksAPI." + k + "." + method[0] + str(inspect.signature(method[1])) + "\n")
+    services.append("\n")
+
+
+with open("README.rst", "w") as f:
+    f.write(intro)
+    for a in attrs:
+        f.write(a)
+    f.write(middle)
+    for s in services:
+        f.write(s)
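Presumably the README is regenerated by running ``python generate_docs.py``
from the repository root; the placeholder host and token suffice because
instantiating the client makes no network request. A small sketch of the
introspection the script relies on (the commented output is what one would
expect for this package):

.. code-block:: python

    import inspect

    from databricks_api import DatabricksAPI

    db = DatabricksAPI(host="localhost", token="token")

    # Public methods are discovered by introspection rather than hard-coded
    [name for name, _ in inspect.getmembers(db.dbfs, inspect.ismethod)
     if not name.startswith("_")]
    # ['add_block', 'close', 'create', 'delete', 'get_status', 'list', ...]

    str(inspect.signature(db.dbfs.put))
    # '(path, contents=None, overwrite=None)'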
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..3480374
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
\ No newline at end of file
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..9d3acd1
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,39 @@
+"""Setup."""
+import io
+from os import path
+from setuptools import setup
+
+
+here = path.abspath(path.dirname(__file__))
+
+# io.open for py27
+with io.open(path.join(here, "README.rst"), encoding="utf-8") as f:
+    long_description = f.read()
+
+# import __version__ attributes
+about = {}
+with open(path.join(here, "databricks_api", "__version__.py")) as f:
+    exec(f.read(), about)
+
+setup(
+    name=about["__title__"],
+    version=about["__version__"],
+    description=about["__description__"],
+    long_description=long_description,
+    url=about["__url__"],
+    author=about["__author__"],
+    author_email=about["__author_email__"],
+    license=about["__license__"],
+    classifiers=[
+        "License :: OSI Approved :: MIT License",
+        "Natural Language :: English",
+        "Programming Language :: Python :: 2",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: Implementation :: CPython",
+    ],
+    keywords="databricks api client",
+    packages=["databricks_api"],
+    install_requires=["databricks-cli"],
+    include_package_data=False,
+    zip_safe=False,
+)