Commit
[Docs] Update docstrings and copyright (#1759)
george0st committed Feb 17, 2022
1 parent 6ba4e98 commit bc67da8
Showing 8 changed files with 18 additions and 7 deletions.
2 changes: 1 addition & 1 deletion docs/conf.py
@@ -33,7 +33,7 @@ def current_version():
# -- Project information -----------------------------------------------------

project = "mlrun"
copyright = "2021, Iguazio"
copyright = "2022, Iguazio"
author = "Iguazio"

master_doc = "index"
6 changes: 3 additions & 3 deletions mlrun/api/crud/model_endpoints.py
@@ -151,7 +151,7 @@ def delete_endpoint_record(
"""
Deletes the KV record of a given model endpoint, project and endpoint_id are used for lookup
-:param auth_info: The required auth information for doing the deletion
+:param auth_info: The auth info of the request
:param project: The name of the project
:param endpoint_id: The id of the endpoint
:param access_key: access key with permission to delete
@@ -200,7 +200,7 @@ def list_endpoints(
and depends on the 'start' and 'end' parameters. By default, when the metrics parameter is None, no metrics are
added to the output of this function.
-:param access_key: V3IO access key for managing user permissions
+:param auth_info: The auth info of the request
:param project: The name of the project
:param model: The name of the model to filter by
:param function: The name of the function to filter by
@@ -278,7 +278,7 @@ def get_endpoint(
"""
Returns a ModelEndpoint object with additional metrics and feature related data.
-:param auth_info: The required auth information for doing the deletion
+:param auth_info: The auth info of the request
:param project: The name of the project
:param endpoint_id: The id of the model endpoint
:param metrics: A list of metrics to return for each endpoint, read more in 'TimeMetric'
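Illustrative usage (not part of this commit): a hypothetical client-side sketch of the endpoint operations documented above. The run-DB method names and arguments below are assumptions and may not match the crud-layer API exactly; auth_info is resolved from the active session rather than passed explicitly.

    import mlrun

    db = mlrun.get_run_db()

    # list a project's endpoints, optionally filtered by model/function; metrics such as
    # "latency_avg_5m" are computed over the start..end window when requested (assumed names)
    endpoints = db.list_model_endpoints(project="default", model="churn", metrics=["latency_avg_5m"])

    # fetch a single endpoint record by project + endpoint_id
    endpoint = db.get_model_endpoint(project="default", endpoint_id="<endpoint-id>")
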
3 changes: 2 additions & 1 deletion mlrun/execution.py
@@ -198,7 +198,7 @@ def get_store_resource(self, url):
feature_vector = context.get_store_resource("store://feature-vectors/default/myvec")
dataset = context.get_store_resource("store://artifacts/default/mydata")
-:param uri: store resource uri/path, store://<type>/<project>/<name>:<version>
+:param url: store resource uri/path, store://<type>/<project>/<name>:<version>
types: artifacts | feature-sets | feature-vectors
"""
return get_store_resource(url, db=self._rundb, secrets=self._secrets_manager)
@@ -613,6 +613,7 @@ def log_artifact(
:param src_path: deprecated, use local_path
:param upload: upload to datastore (default is True)
:param labels: a set of key/value labels to tag the artifact with
+:param format: optional, format to use (e.g. csv, parquet, ..)
:param db_key: the key to use in the artifact DB table, by default
its run name + '_' + key
db_key=False will not register it in the artifacts table
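Illustrative usage (not part of this commit): a minimal sketch of the two context methods touched above, assuming a handler that receives the MLRun context object.

    def handler(context):
        # resolve a store resource by store:// URI (artifacts | feature-sets | feature-vectors)
        dataset = context.get_store_resource("store://artifacts/default/mydata")

        # log an artifact and pick its serialization format via the newly documented `format` argument
        context.log_artifact("my-report", body="col1,col2\n1,2\n", format="csv", labels={"stage": "test"})
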
5 changes: 5 additions & 0 deletions mlrun/projects/operations.py
@@ -106,6 +106,7 @@ def my_pipe(url=""):
:param watch: watch/follow run log, True by default
:param local: run the function locally vs on the runtime/cluster
:param verbose: add verbose prints/logs
+:param project_object: override the project object to use, will default to the project set in the runtime context.
:param auto_build: when set to True and the function requires a build it will be built on the first
function run; use only if you don't plan on changing the build config between runs
@@ -194,6 +195,9 @@ def build_function(
:param secret_name: k8s secret for accessing the docker registry
:param requirements: list of python packages or pip requirements file path, defaults to None
:param mlrun_version_specifier: which mlrun package version to include (if not current)
+:param builder_env: Kaniko builder pod env vars dict (for config/credentials)
+e.g. builder_env={"GIT_TOKEN": token}, does not work yet in KFP
+:param project_object: override the project object to use, will default to the project set in the runtime context.
:param builder_env: Kaniko builder pod env vars dict (for config/credentials)
e.g. builder_env={"GIT_TOKEN": token}, does not work yet in KFP
"""
@@ -266,6 +270,7 @@ def deploy_function(
:param env: dict of extra environment variables
:param tag: extra version tag
:param verbose: add verbose prints/logs
+:param project_object: override the project object to use, will default to the project set in the runtime context.
"""
engine, function = _get_engine_and_function(function, project_object)
if function.kind not in mlrun.runtimes.RuntimeKinds.nuclio_runtimes():
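Illustrative usage (not part of this commit): a hedged sketch of the project_object override and of builder_env as documented above; the project, function, and token values are placeholders.

    import mlrun

    project = mlrun.get_or_create_project("demo", context="./")

    # run against an explicit project object instead of the one set in the runtime context
    run = mlrun.run_function("my-func", params={"p1": 1}, project_object=project)

    # pass registry/git credentials to the Kaniko builder (not supported inside KFP yet)
    mlrun.build_function("my-func", builder_env={"GIT_TOKEN": "<token>"}, project_object=project)

    # deploy a real-time (nuclio) function, again overriding the project object
    mlrun.deploy_function("my-serving", project_object=project)
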
5 changes: 3 additions & 2 deletions mlrun/projects/project.py
@@ -1414,6 +1414,7 @@ def func(self, key, sync=False) -> mlrun.runtimes.BaseRuntime:
def get_function(self, key, sync=False, enrich=False) -> mlrun.runtimes.BaseRuntime:
"""get function object by name
:param key: name of key for search
:param sync: will reload/reinit the function
:param enrich: add project info/config/source info to the function object
@@ -1539,7 +1540,7 @@ def with_secrets(self, kind, source, prefix=""):
Vault secret source has several options::
proj.with_secrets('vault', {'user': <user name>, 'secrets': ['secret1', 'secret2' ...]})
-proj.with_secrets('vault', {'project': <proj. name>, 'secrets': ['secret1', 'secret2' ...]})
+proj.with_secrets('vault', {'project': <proj.name>, 'secrets': ['secret1', 'secret2' ...]})
proj.with_secrets('vault', ['secret1', 'secret2' ...])
The 2nd option uses the current project name as context.
@@ -1760,7 +1761,7 @@ def save_workflow(self, name, target, artifact_path=None, ttl=None):
:param artifact_path:
target path/url for workflow artifacts, the string
'{{workflow.uid}}' will be replaced by workflow id
-:param ttl: pipeline ttl in secs (after that the pods will be removed)
+:param ttl: pipeline ttl (time to live) in secs (after that the pods will be removed)
"""
if not name or name not in self.spec._workflows:
raise ValueError(f"workflow {name} not found")
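Illustrative usage (not part of this commit): a short sketch of the project methods whose docstrings changed above; project, function, and workflow names are placeholders.

    import mlrun

    proj = mlrun.get_or_create_project("demo", context="./")

    # reload the function object from the project spec and enrich it with project info
    fn = proj.get_function("trainer", sync=True, enrich=True)

    # vault secret source using the current project name as context
    proj.with_secrets("vault", ["secret1", "secret2"])

    # persist a workflow definition; ttl is the pipeline time to live in seconds
    proj.save_workflow("main", target="./workflow.yaml", ttl=3600)
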
1 change: 1 addition & 0 deletions mlrun/run.py
@@ -908,6 +908,7 @@ def run_pipeline(
:param pipeline: KFP pipeline function or path to .yaml/.zip pipeline file
:param arguments: pipeline arguments
:param project: name of project
:param experiment: experiment name
:param run: optional, run name
:param namespace: Kubernetes namespace (if not using default)
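Illustrative usage (not part of this commit): a minimal sketch of run_pipeline with the arguments listed above; the pipeline path and all names are placeholders.

    import mlrun

    run_id = mlrun.run_pipeline(
        "./pipeline.yaml",           # pipeline function or path to .yaml/.zip pipeline file
        arguments={"model": "xgb"},  # pipeline arguments
        project="demo",              # name of project
        experiment="exp1",           # experiment name
        namespace="mlrun",           # Kubernetes namespace (if not using default)
    )
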
2 changes: 2 additions & 0 deletions mlrun/runtimes/base.py
@@ -801,6 +801,8 @@ def as_step(
:param params: input parameters (dict)
:param hyperparams: hyper parameters
:param selector: selection criteria for hyper params
+:param hyper_param_options: hyper param options (selector, early stop, strategy, ..)
+see: :py:class:`~mlrun.model.HyperParamOptions`
:param inputs: input objects (dict of key: path)
:param outputs: list of outputs which can pass in the workflow
:param artifact_path: default artifact output path (replace out_path)
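Illustrative usage (not part of this commit): a hedged sketch of the new hyper_param_options argument in a pipeline step; `fn` stands for any MLRun function object and the parameter values are placeholders.

    from mlrun.model import HyperParamOptions

    step = fn.as_step(
        name="train",
        params={"epochs": 10},
        hyperparams={"lr": [0.1, 0.01, 0.001]},
        hyper_param_options=HyperParamOptions(selector="max.accuracy", strategy="grid"),
        outputs=["model"],
    )
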
1 change: 1 addition & 0 deletions mlrun/serving/states.py
@@ -534,6 +534,7 @@ def add_route(
:param class_name: class name to build the route step from (when route is not provided)
:param class_args: class init arguments
:param handler: class handler to invoke on run/event
+:param function: function this step should run in
"""

if not route and not class_name:
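Illustrative usage (not part of this commit): a hedged sketch of adding a route to a serving router graph, showing the new `function` argument; the class and child-function names are placeholders and the router topology call is assumed.

    import mlrun

    fn = mlrun.new_function("my-serving", kind="serving")
    graph = fn.set_topology("router")

    # route events to a model class, and pin this route to run in a named child function
    graph.add_route("model-a", class_name="MyModelClass", function="model-a-runner")
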
