diff --git a/lib/pbench/cli/server/__init__.py b/lib/pbench/cli/server/__init__.py
index fef103774c..108f8307ac 100644
--- a/lib/pbench/cli/server/__init__.py
+++ b/lib/pbench/cli/server/__init__.py
@@ -1,8 +1,11 @@
 import datetime
 from threading import Thread
 import time
+from typing import Any, Optional
 
 import click
+from click import Context, Parameter, ParamType
+from dateutil import parser
 
 from pbench.server import PbenchServerConfig
 from pbench.server.database import init_db
@@ -30,13 +33,13 @@ def __bool__(self) -> bool:
         return self.detail
 
     def error(self, message: str):
-        """Write a message if details are enabled.
+        """Write an error message if error details are enabled.
 
         Args:
             message: Detail string
         """
         if self.errors:
-            click.secho(f"|| {message}", fg="red")
+            click.secho(f"|E| {message}", fg="red", err=True)
 
     def message(self, message: str):
         """Write a message if details are enabled.
@@ -45,7 +48,16 @@ def message(self, message: str):
             message: Detail string
         """
         if self.detail:
-            click.echo(f"|| {message}")
+            click.echo(f"|I| {message}")
+
+    def warning(self, message: str):
+        """Write a warning message if error details are enabled.
+
+        Args:
+            message: Detail string
+        """
+        if self.errors:
+            click.secho(f"|W| {message}", fg="blue", err=True)
 
 
 class Verify:
@@ -75,7 +87,7 @@ def status(self, message: str):
         """
         if self.verify:
             ts = datetime.datetime.now().astimezone()
-            click.secho(f"({ts:%H:%M:%S}) {message}", fg="green")
+            click.secho(f"({ts:%H:%M:%S}) {message}", fg="green", err=True)
 
 
 class Watch:
@@ -120,10 +132,33 @@ def watcher(self):
             hours, remainder = divmod(delta, 3600)
             minutes, seconds = divmod(remainder, 60)
             click.secho(
-                f"[{hours:02d}:{minutes:02d}:{seconds:02d}] {self.status}", fg="cyan"
+                f"[{hours:02d}:{minutes:02d}:{seconds:02d}] {self.status}",
+                fg="cyan",
+                err=True,
             )
 
 
+class DateParser(ParamType):
+    """The DateParser type converts date strings into `datetime` objects.
+
+    This is a variant of click's built-in DateTime parser, but uses the
+    more flexible dateutil.parser.
+    """
+
+    name = "dateparser"
+
+    def convert(
+        self, value: Any, param: Optional[Parameter], ctx: Optional[Context]
+    ) -> Any:
+        if isinstance(value, datetime.datetime):
+            return value
+
+        try:
+            return parser.parse(value)
+        except Exception as e:
+            self.fail(f"{value!r} cannot be converted to a datetime: {str(e)!r}")
+
+
 def config_setup(context: object) -> PbenchServerConfig:
     config = PbenchServerConfig.create(context.config)
     # We're going to need the DB to track dataset state, so setup DB access.
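
For reference, a minimal sketch (illustration only, not part of the patch) of the flexibility this buys: dateutil parses all of the following without a format string, where click.DateTime would need each format enumerated.

    from dateutil import parser

    # A fixed-format parser would need "%Y-%m-%d", "%b %d %Y %H:%M", and an
    # ISO-8601 format listed explicitly; dateutil infers them.
    for text in ("2024-04-03", "Apr 3 2024 12:07", "2024-04-03T12:07:47+00:00"):
        print(parser.parse(text))
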
diff --git a/lib/pbench/cli/server/reindex.py b/lib/pbench/cli/server/reindex.py
new file mode 100644
index 0000000000..103bd62ef3
--- /dev/null
+++ b/lib/pbench/cli/server/reindex.py
@@ -0,0 +1,318 @@
+from collections import defaultdict
+from typing import Any, Optional
+from urllib.parse import urljoin
+
+import click
+import requests
+from sqlalchemy.orm import Query
+
+from pbench.cli import pass_cli_context
+from pbench.cli.server import config_setup, DateParser, Detail, Verify, Watch
+from pbench.cli.server.options import common_options
+from pbench.common.logger import get_pbench_logger
+from pbench.server import BadConfig
+from pbench.server.database.database import Database
+from pbench.server.database.models.datasets import (
+    Dataset,
+    OperationName,
+    OperationState,
+)
+from pbench.server.database.models.index_map import IndexMap
+from pbench.server.database.models.server_settings import (
+    OPTION_SERVER_INDEXING,
+    ServerSetting,
+)
+from pbench.server.sync import Sync
+
+detailer: Optional[Detail] = None
+watcher: Optional[Watch] = None
+verifier: Optional[Verify] = None
+
+
+# Options that select datasets
+SELECTORS = {"since", "until", "id", "name"}
+
+
+# Options that do something to datasets
+OPERATORS = {"reindex", "delete", "list"}
+
+
+def datasets(options: dict[str, Any]) -> Query[Dataset]:
+    """Return a filtered query to select datasets
+
+    NOTE: filters are all AND-ed, so there's no way to select by id OR name.
+    This could be handled by various means that are awkward in click, e.g.,
+    with a "mode switch" and callback handling to push the selector option
+    values into "or" or "and" lists. While I could see that being "cool", I
+    don't see enough value to bother with it now.
+
+    Args:
+        options: command options and values
+
+    Returns:
+        a filtered SQLAlchemy Query
+    """
+    query = (
+        Database.db_session.query(Dataset)
+        .execution_options(stream_results=True)
+        .yield_per(2000)
+    )
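+    # NOTE: stream_results plus yield_per(2000) keeps a server-side cursor
+    # open and fetches rows in batches, so a large Dataset table need not be
+    # materialized in memory all at once.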
+    filters = []
+    ids = options.get("id")
+    if ids:
+        verifier.status(f"Filter resource IDs match {ids}")
+        filters.append(Dataset.resource_id.in_(ids))
+    names = options.get("name")
+    if names:
+        verifier.status(f"Filter names match {names}")
+        filters.append(Dataset.name.in_(names))
+    since = options.get("since")
+    until = options.get("until")
+    if since:
+        verifier.status(f"Filter since {since}")
+        filters.append(Dataset.uploaded >= since)
+    if until:
+        verifier.status(f"Filter until {until}")
+        filters.append(Dataset.uploaded <= until)
+    return query.filter(*filters)
+
+
+def opensearch(
+    operation: str, dataset: Dataset, options: dict[str, Any], params: dict[str, str]
+) -> Optional[requests.Response]:
+    """Issue an Opensearch request against all of a dataset's indices.
+
+    Args:
+        operation: Opensearch operation name (e.g., "_search")
+        dataset: a Dataset object
+        options: command options and values
+        params: Opensearch query parameters
+
+    Returns:
+        the Opensearch response, or None if the dataset has no indices
+    """
+    es_url, ca_bundle = options.get("_es")
+    indices = ",".join(IndexMap.indices(dataset))
+    if not indices:
+        if not options.get("reindex"):
+            detailer.warning(f"{dataset.name} is not indexed")
+        return None
+    url = indices + "/" + operation
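+    # The "dis_max" query below matches all of a dataset's documents: the
+    # run document itself (via "run.id") and the documents that point back
+    # to their parent run (via "run_data_parent").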
+    json = {
+        "query": {
+            "dis_max": {
+                "queries": [
+                    {"term": {"run.id": dataset.resource_id}},
+                    {"term": {"run_data_parent": dataset.resource_id}},
+                ]
+            }
+        }
+    }
+    url = urljoin(es_url, url)
+    response = requests.post(url, json=json, params=params, verify=ca_bundle)
+    return response
+
+
+def summarize_index(dataset: Dataset, options: dict[str, Any]):
+    """A simple Opensearch query to report index statistics
+
+    Args:
+        dataset: a Dataset object
+        options: command options and values
+    """
+    try:
+        response = opensearch(
+            "_search",
+            dataset,
+            options,
+            {"ignore_unavailable": "true", "_source": "false"},
+        )
+        if response is None:
+            return
+        if response.ok:
+            json = response.json()
+            indices = defaultdict(int)
+            hits = json["hits"]["hits"]
+            for h in hits:
+                indices[h["_index"]] += 1
+            click.echo(
+                f"{dataset.name}: {len(hits)} indexed documents in {len(indices)} indices"
+            )
+            if detailer.detail:
+                for index, count in indices.items():
+                    click.echo(f"  {count:<10,d} {index}")
+        else:
+            if response.headers["content-type"] == "application/json":
+                message = response.json()
+            else:
+                message = response.text
+            detailer.error(
+                f"{dataset.name} error querying index: ({response.status_code}) {message}"
+            )
+    except Exception as e:
+        if verifier.verify:
+            raise
+        detailer.error(f"{dataset.name} error querying index: {str(e)!r}")
+        click.echo(f"{dataset.name} exception: {str(e)!r}")
+
+
+def delete_index(dataset: Dataset, sync: Sync, options: dict[str, Any]):
+    """A simple Opensearch query to delete dataset indexed data
+
+    Args:
+        dataset: a Dataset
+        sync: a Sync object to set the index as "working"
+        options: command options and values
+    """
+    sync.update(
+        dataset, OperationState.WORKING, message="pbench-reindex is deleting index"
+    )
+    es_url, ca_bundle = options.get("_es")
+    state = OperationState.OK
+    message = "Index deleted by pbench-reindex"
+    try:
+        response = opensearch(
+            "_delete_by_query", dataset, options, {"ignore_unavailable": "true"}
+        )
+        if response is None:
+            message = "Not indexed"
+            return
+        if response.ok:
+            detailer.message(f"{dataset.name} indices successfully deleted")
+        else:
+            if response.headers["content-type"] == "application/json":
+                message = response.json()
+            else:
+                message = response.text
+            detailer.error(
+                f"{dataset.name} index can't be deleted: ({response.status_code}) {message}"
+            )
+    except Exception as e:
+        detailer.error(f"{dataset.name} error deleting index: {str(e)!r}")
+        state = OperationState.WARNING
+        message = f"Index partially deleted by pbench-reindex: {str(e)!r}"
+    finally:
+        sync.update(dataset, state, message=message)
+
+
+def worker(options: dict[str, Any]):
+    """Do the real work the command wrapper defers: apply the selected
+    operations to each selected dataset.
+
+    Args:
+        options: command options and values
+    """
+
+    sync = Sync(options.get("_logger"), OperationName.INDEX)
+
+    to_delete = []
+    to_sync = []
+    try:
+        for dataset in datasets(options):
+            watcher.update(f"Checking {dataset.name}")
+            if options.get("list"):
+                summarize_index(dataset, options)
+            if options.get("delete") or options.get("reindex"):
+                # Delete the indices and remove IndexMaps: for re-index, we
+                # want to be sure there's no existing index.
+                delete_index(dataset, sync, options)
+                to_delete.append(dataset)
+                if options.get("reindex"):
+                    to_sync.append(dataset)
+    finally:
+        # Defer index-map deletion outside of the SQL generator loop to avoid
+        # breaking the SQLAlchemy cursor -- and we don't want to enable
+        # indexing until after we've removed the old index map.
+        for dataset in to_delete:
+            IndexMap.delete(dataset)
+        for dataset in to_sync:
+            sync.update(
+                dataset,
+                OperationState.READY,
+                message="Indexing enabled by pbench-reindex",
+            )
+
+
+@click.command(name="pbench-reindex")
+@pass_cli_context
+@click.option(
+    "--delete", default=False, is_flag=True, help="Delete index for selected dataset(s)"
+)
+@click.option(
+    "--detail",
+    default=False,
+    is_flag=True,
+    help="Provide extra diagnostic information",
+)
+@click.option(
+    "--errors",
+    default=False,
+    is_flag=True,
+    help="Show individual dataset errors",
+)
+@click.option("--id", type=str, multiple=True, help="Select dataset by resource ID")
+@click.option(
+    "--indexing",
+    type=click.Choice(["enable", "disable"], case_sensitive=False),
+    help="Enable or disable the Pbench Server indexer for future uploads",
+)
+@click.option(
+    "--list", default=False, is_flag=True, help="Show dataset indexing status"
+)
+@click.option("--name", type=str, multiple=True, help="Select dataset by name")
+@click.option(
+    "--progress", type=float, default=0.0, help="Show periodic progress messages"
+)
+@click.option(
+    "--reindex", is_flag=True, default=False, help="Reindex selected datasets"
+)
+@click.option(
+    "--since",
+    type=DateParser(),
+    help="Select datasets uploaded on or after specified date/time",
+)
+@click.option(
+    "--until",
+    type=DateParser(),
+    help="Select datasets uploaded on or before specified date/time",
+)
+@click.option(
+    "--verify", "-v", default=False, is_flag=True, help="Display detailed messages"
+)
+@common_options
+def reindex(context: object, **kwargs):
+    """Control dataset indexing.
+
+    This can globally enable or disable indexing for the server when datasets
+    are uploaded.
+
+    It can also report on the indexing status of datasets, delete a dataset's
+    indexed documents, and (re-)index datasets.
+    \f
+
+    Args:
+        context: click context
+        kwargs: click options
+    """
+    global detailer, verifier, watcher
+    detailer = Detail(kwargs.get("detail"), kwargs.get("errors"))
+    verifier = Verify(kwargs.get("verify"))
+    watcher = Watch(kwargs.get("progress"))
+
+    try:
+        config = config_setup(context)
+        logger = get_pbench_logger("pbench-reindex", config)
+        es_url = config.get("Indexing", "uri")
+        ca_bundle = config.get("Indexing", "ca_bundle")
+        kwargs["_es"] = (es_url, ca_bundle)
+        kwargs["_logger"] = logger
+
+        # Check whether to enable or disable automatic indexing on upload.
+        indexing = kwargs.get("indexing")
+        if indexing:
+            state = indexing == "enable"
+            detailer.message(f"{indexing} upload indexing")
+            ServerSetting.set(key=OPTION_SERVER_INDEXING, value=state)
+
+        # Operate on individual datasets if selected
+        if (SELECTORS | OPERATORS) & {k for k, v in kwargs.items() if v}:
+            verifier.status("updating selected datasets")
+            worker(kwargs)
+        else:
+            click.echo("nothing to do", err=True)
+        rv = 0
+    except Exception as exc:
+        if verifier.verify:
+            raise
+        click.secho(exc, err=True, bg="red")
+        rv = 2 if isinstance(exc, BadConfig) else 1
+
+    click.get_current_context().exit(rv)
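
For reference, a minimal sketch of driving the new command through click's test runner (illustration only: a real run needs a Pbench server configuration and database, and the dates here are arbitrary):

    from click.testing import CliRunner

    from pbench.cli.server.reindex import reindex

    # Selector options (--since/--until/--id/--name) narrow the target set;
    # operator options (--list/--delete/--reindex) choose what to do with it.
    runner = CliRunner()
    result = runner.invoke(
        reindex, ["--since", "2024-01-01", "--until", "Apr 1 2024", "--list"]
    )
    print(result.exit_code, result.output)
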
diff --git a/lib/pbench/cli/server/report.py b/lib/pbench/cli/server/report.py
index 028f32d4e8..6f80a9074f 100644
--- a/lib/pbench/cli/server/report.py
+++ b/lib/pbench/cli/server/report.py
@@ -456,7 +456,7 @@ def report_states():
             detailer.error(f"{dataset} doesn't have operational state")
         else:
             operations[operation][state] += 1
-            if state == "FAILED":
+            if state in ("FAILED", "WARNING"):
                 detailer.error(f"{operation} {state} for {dataset}: {message!r}")
                 if operation == "INDEX":
                     match = index_pattern.match(message)
diff --git a/lib/pbench/server/api/resources/intake_base.py b/lib/pbench/server/api/resources/intake_base.py
index e782023735..ed428edea8 100644
--- a/lib/pbench/server/api/resources/intake_base.py
+++ b/lib/pbench/server/api/resources/intake_base.py
@@ -47,6 +47,10 @@
     OperationName,
     OperationState,
 )
+from pbench.server.database.models.server_settings import (
+    OPTION_SERVER_INDEXING,
+    ServerSetting,
+)
 from pbench.server.sync import Sync
 from pbench.server.utils import UtcTimeHelper
 
@@ -591,7 +595,10 @@ def _intake(
         # Finally, update the operational state and Audit success.
         try:
             # Determine whether we should enable the INDEX operation.
-            should_index = not metadata.get(Metadata.SERVER_ARCHIVE, False)
+            should_index = bool(ServerSetting.get(OPTION_SERVER_INDEXING).value)
+            should_index = should_index and not metadata.get(
+                Metadata.SERVER_ARCHIVE, False
+            )
             enable_next = [OperationName.INDEX] if should_index else None
             if not should_index:
                 notes.append("Indexing is disabled by 'archive only' setting.")
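
The intake change above reduces the indexing decision to a conjunction of the new server-wide setting and the per-dataset archive-only flag; a minimal sketch of that logic (the helper name is hypothetical):

    def should_enable_indexing(server_indexing: bool, archive_only: bool) -> bool:
        # Index on upload only when the server-wide setting allows it AND the
        # dataset was not uploaded as "archive only".
        return server_indexing and not archive_only

    assert should_enable_indexing(True, False) is True
    assert should_enable_indexing(True, True) is False
    assert should_enable_indexing(False, False) is False
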
diff --git a/lib/pbench/server/database/alembic/versions/558608818623_warningstate.py b/lib/pbench/server/database/alembic/versions/558608818623_warningstate.py
new file mode 100644
index 0000000000..12d4b68f1b
--- /dev/null
+++ b/lib/pbench/server/database/alembic/versions/558608818623_warningstate.py
@@ -0,0 +1,27 @@
+"""Add WARNING operation status
+
+Allow a "partial success" operation status.
+
+Revision ID: 558608818623
+Revises: ffcc6daffedb
+Create Date: 2024-04-03 12:07:47.018612
+
+"""
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "558608818623"
+down_revision = "ffcc6daffedb"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    op.execute("ALTER TYPE operationstate ADD VALUE 'WARNING'")
+
+
+def downgrade() -> None:
+    # Downgrade is problematic, and won't be attempted. Having unused ENUM
+    # values defined shouldn't represent a problem.
+    pass
+    # ### end Alembic commands ###
diff --git a/lib/pbench/server/database/models/datasets.py b/lib/pbench/server/database/models/datasets.py
index 66ce3d5633..b683bb94aa 100644
--- a/lib/pbench/server/database/models/datasets.py
+++ b/lib/pbench/server/database/models/datasets.py
@@ -487,6 +487,7 @@ class OperationState(enum.Enum):
     WORKING = enum.auto()
     OK = enum.auto()
     FAILED = enum.auto()
+    WARNING = enum.auto()
 
 
 class Operation(Database.Base):
diff --git a/lib/pbench/server/database/models/index_map.py b/lib/pbench/server/database/models/index_map.py
index 1d3775c30b..b9b188db49 100644
--- a/lib/pbench/server/database/models/index_map.py
+++ b/lib/pbench/server/database/models/index_map.py
@@ -109,6 +109,26 @@ def create(cls, dataset: Dataset, map: IndexMapType):
 
         cls.commit(dataset, "create")
 
+    @classmethod
+    def delete(cls, dataset: Dataset, root: Optional[str] = None):
+        """Delete index map rows, optionally limited to a root index name.
+
+        Args:
+            dataset: Dataset object
+            root: optional root index name
+
+        Raises:
+            IndexMapSqlError: problem interacting with Database
+        """
+        filters = [IndexMap.dataset == dataset]
+        if root:
+            filters.append(IndexMap.root == root)
+        try:
+            Database.db_session.query(IndexMap).filter(*filters).delete()
+            cls.commit(dataset, "delete")
+        except SQLAlchemyError as e:
+            raise IndexMapSqlError(e, operation="delete", dataset=dataset, name=root)
+
     @classmethod
     def merge(cls, dataset: Dataset, merge_map: IndexMapType):
         """Merge two index maps, generated by distinct phases of indexing.
diff --git a/lib/pbench/server/database/models/server_settings.py b/lib/pbench/server/database/models/server_settings.py
index f99f18398e..d981ba3ee0 100644
--- a/lib/pbench/server/database/models/server_settings.py
+++ b/lib/pbench/server/database/models/server_settings.py
@@ -162,9 +162,17 @@ def validate_server_banner(key: str, value: JSONVALUE) -> JSONVALUE:
     return value
 
 
+def validate_server_indexing(key: str, value: bool) -> bool:
+    if not isinstance(value, bool):
+        raise ServerSettingBadValue(key, value)
+    return value
+
+
 OPTION_DATASET_LIFETIME = "dataset-lifetime"
 OPTION_SERVER_BANNER = "server-banner"
 OPTION_SERVER_STATE = "server-state"
+OPTION_SERVER_INDEXING = "server-indexing"
+
 
 SERVER_SETTINGS_OPTIONS = {
@@ -179,6 +187,10 @@ def validate_server_banner(key: str, value: JSONVALUE) -> JSONVALUE:
         "validator": validate_server_state,
         "default": lambda: {STATE_STATUS_KEY: STATE_STATUS_KEYWORD_ENABLED},
     },
+    OPTION_SERVER_INDEXING: {
+        "validator": validate_server_indexing,
+        "default": lambda: True,
+    },
 }
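
The validator is deliberately strict: because Python's bool is a subclass of int, an isinstance check against bool rejects truthy stand-ins like 1 or "yes" while still accepting True. A quick illustration (the helper here is hypothetical, mirroring validate_server_indexing above):

    def accepts(value) -> bool:
        # Mirrors validate_server_indexing: only a real boolean passes.
        return isinstance(value, bool)

    for candidate in (True, False, 1, 0, "yes", []):
        print(f"{candidate!r}: {'accepted' if accepts(candidate) else 'rejected'}")
    # True/False are accepted; 1 and 0 are rejected even though they are
    # ints, because isinstance(1, bool) is False (while isinstance(True, int)
    # is True).
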
+ "prefix.run-toc.2023-07", + ] == sorted(IndexMap.indices(drb)) + IndexMap.delete(drb, "run-misc") + assert [ + "prefix.run-data.2023-07", + "prefix.run-toc.2023-06", + "prefix.run-toc.2023-07", + ] == sorted(IndexMap.indices(drb)) + IndexMap.delete(drb) + assert IndexMap.exists(drb) is False + @pytest.mark.parametrize("m1,m2", ((0, 1), (1, 0))) def test_merge(self, db_session, attach_dataset, m1, m2): """Test index map merge @@ -137,7 +164,7 @@ def fake_commit(): message ), f"{str(e.value)!r} doesn't start with {message!r}" - def test_delete(self, db_session, create_user): + def test_delete_dataset(self, db_session, create_user): """Test index map deletion with dataset We create a "scratch" dataset so that deleting it doesn't diff --git a/lib/pbench/test/unit/server/database/test_server_settings.py b/lib/pbench/test/unit/server/database/test_server_settings.py index c4596f2a7e..c5e9620925 100644 --- a/lib/pbench/test/unit/server/database/test_server_settings.py +++ b/lib/pbench/test/unit/server/database/test_server_settings.py @@ -163,6 +163,7 @@ def test_get_all_default(self): """ assert ServerSetting.get_all() == { "dataset-lifetime": "3650", + "server-indexing": True, "server-state": {"status": "enabled"}, "server-banner": None, } @@ -174,14 +175,21 @@ def test_get_all(self): c1 = ServerSetting.create(key="dataset-lifetime", value="2") c2 = ServerSetting.create(key="server-state", value={"status": "enabled"}) c3 = ServerSetting.create(key="server-banner", value={"message": "Mine"}) + c4 = ServerSetting.create(key="server-indexing", value=False) assert ServerSetting.get_all() == { "dataset-lifetime": "2", + "server-indexing": False, "server-state": {"status": "enabled"}, "server-banner": {"message": "Mine"}, } self.session.check_session( queries=1, - committed=[FakeRow.clone(c1), FakeRow.clone(c2), FakeRow.clone(c3)], + committed=[ + FakeRow.clone(c1), + FakeRow.clone(c2), + FakeRow.clone(c3), + FakeRow.clone(c4), + ], filters=[], ) @@ -286,6 +294,13 @@ def test_bad_state(self, value): ServerSetting.create(key="server-state", value=value) assert exc.value.value == value + @pytest.mark.parametrize("bad", (0, 1, "yes", [])) + def test_bad_indexing(self, bad): + """Verify rejection of non-boolean values""" + with pytest.raises(ServerSettingBadValue) as exc: + ServerSetting.create(key="server-indexing", value=bad) + assert exc.value.value == bad + @pytest.mark.parametrize("value", [1, True, "yes", ["a", "b"], {"banner": "xyzzy"}]) def test_bad_banner(self, value): """ diff --git a/lib/pbench/test/unit/server/test_server_settings.py b/lib/pbench/test/unit/server/test_server_settings.py index c1d2cfde45..defb6ad61e 100644 --- a/lib/pbench/test/unit/server/test_server_settings.py +++ b/lib/pbench/test/unit/server/test_server_settings.py @@ -63,7 +63,7 @@ def query_api( def test_get_bad_keys(self, query_get): response = query_get("xyzzy", HTTPStatus.BAD_REQUEST) assert response.json == { - "message": "Unrecognized keyword ['xyzzy'] for parameter key; allowed keywords are ['dataset-lifetime', 'server-banner', 'server-state']" + "message": "Unrecognized keyword ['xyzzy'] for parameter key; allowed keywords are ['dataset-lifetime', 'server-banner', 'server-indexing', 'server-state']" } def test_get1(self, query_get): @@ -80,6 +80,7 @@ def test_get_all(self, query_get, key): response = query_get(key) assert response.json == { "dataset-lifetime": "3650", + "server-indexing": True, "server-state": {"status": "enabled"}, "server-banner": None, } @@ -109,7 +110,7 @@ def test_put_missing_value(self, 
diff --git a/lib/pbench/test/unit/server/test_server_settings.py b/lib/pbench/test/unit/server/test_server_settings.py
index c1d2cfde45..defb6ad61e 100644
--- a/lib/pbench/test/unit/server/test_server_settings.py
+++ b/lib/pbench/test/unit/server/test_server_settings.py
@@ -63,7 +63,7 @@ def query_api(
     def test_get_bad_keys(self, query_get):
         response = query_get("xyzzy", HTTPStatus.BAD_REQUEST)
         assert response.json == {
-            "message": "Unrecognized keyword ['xyzzy'] for parameter key; allowed keywords are ['dataset-lifetime', 'server-banner', 'server-state']"
+            "message": "Unrecognized keyword ['xyzzy'] for parameter key; allowed keywords are ['dataset-lifetime', 'server-banner', 'server-indexing', 'server-state']"
         }
 
     def test_get1(self, query_get):
@@ -80,6 +80,7 @@ def test_get_all(self, query_get, key):
         response = query_get(key)
         assert response.json == {
             "dataset-lifetime": "3650",
+            "server-indexing": True,
             "server-state": {"status": "enabled"},
             "server-banner": None,
         }
@@ -109,7 +110,7 @@ def test_put_missing_value(self, query_put):
     def test_put_bad_key(self, query_put):
         response = query_put(key="fookey", expected_status=HTTPStatus.BAD_REQUEST)
         assert response.json == {
-            "message": "Unrecognized keyword ['fookey'] for parameter key; allowed keywords are ['dataset-lifetime', 'server-banner', 'server-state']"
+            "message": "Unrecognized keyword ['fookey'] for parameter key; allowed keywords are ['dataset-lifetime', 'server-banner', 'server-indexing', 'server-state']"
         }
 
     def test_put_bad_keys(self, query_put):
@@ -119,7 +120,7 @@ def test_put_bad_keys(self, query_put):
             expected_status=HTTPStatus.BAD_REQUEST,
         )
         assert response.json == {
-            "message": "Unrecognized server settings ['fookey'] specified: valid settings are ['dataset-lifetime', 'server-banner', 'server-state']"
+            "message": "Unrecognized server settings ['fookey'] specified: valid settings are ['dataset-lifetime', 'server-banner', 'server-indexing', 'server-state']"
        }
 
     @pytest.mark.parametrize(
@@ -127,6 +128,7 @@ def test_put_bad_keys(self, query_put):
         [
             ("dataset-lifetime", "14 years"),
             ("server-banner", {"banner": None}),
+            ("server-indexing", "bad"),
             ("server-state", "running"),
             ("server-state", {"status": "disabled"}),
         ],
@@ -229,6 +231,7 @@ def test_put_config(self, query_get, query_put):
         response = query_get(None)
         assert response.json == {
             "dataset-lifetime": "2",
+            "server-indexing": True,
             "server-state": {"status": "enabled"},
             "server-banner": None,
         }
diff --git a/server/Makefile b/server/Makefile
index b28b7228a2..b8a0316dc1 100644
--- a/server/Makefile
+++ b/server/Makefile
@@ -21,6 +21,7 @@ INSTALLOPTS = --directory
 
 click-scripts = \
 	pbench-audit \
+	pbench-reindex \
 	pbench-report-generator \
 	pbench-tree-manage \
 	pbench-user-create \
diff --git a/setup.cfg b/setup.cfg
index ca2adcc887..8be6d2b920 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -29,17 +29,18 @@ console_scripts =
     pbench-clear-results = pbench.cli.agent.commands.results.clear:main
     pbench-clear-tools = pbench.cli.agent.commands.tools.clear:main
     pbench-config = pbench.cli.agent.commands.conf:main
-    pbench-report-generator = pbench.cli.server.report:report
-    pbench-tree-manage = pbench.cli.server.tree_manage:tree_manage
     pbench-is-local = pbench.cli.agent.commands.is_local:main
     pbench-list-tools = pbench.cli.agent.commands.tools.list:main
     pbench-list-triggers = pbench.cli.agent.commands.triggers.list:main
     pbench-register-tool-trigger = pbench.cli.agent.commands.triggers.register:main
+    pbench-reindex = pbench.cli.server.reindex:reindex
+    pbench-report-generator = pbench.cli.server.report:report
     pbench-results-move = pbench.cli.agent.commands.results.move:main
    pbench-results-push = pbench.cli.agent.commands.results.push:main
     pbench-server = pbench.cli.server.shell:main
     pbench-server-config = pbench.cli.server.conf:main
     pbench-tools-kill = pbench.cli.agent.commands.tools.kill:main
+    pbench-tree-manage = pbench.cli.server.tree_manage:tree_manage
     pbench-user-create = pbench.cli.server.user_management:user_create
     pbench-user-delete = pbench.cli.server.user_management:user_delete
     pbench-user-list = pbench.cli.server.user_management:user_list