Issue 5666 - CLI - Add timeout parameter for tasks
Description:  Add a timeout argument for all dsconf tasks

relates: #5666

Reviewed by: spichugi & jchapman (Thanks!!)
mreynolds389 committed Feb 20, 2023
1 parent fa344b9 commit 26eaa1b
Showing 14 changed files with 351 additions and 61 deletions.
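At its core, the change threads a user-supplied timeout through Task.wait() and treats a None exit code as "still running" rather than "failed". A minimal sketch of the resulting lib389 calling pattern, assuming a connected DirSrv instance `inst` (the LDIF path and suffix are placeholders):

from lib389.tasks import ImportTask

# Start an import task (API names as used in the diff below).
task = ImportTask(inst)
task.import_suffix_from_ldif(ldiffile='/tmp/import.ldif', suffix='dc=example,dc=com')

# wait() returns once the task finishes or the timeout expires;
# timeout=0 (the new CLI default) keeps the old wait-forever behaviour.
task.wait(timeout=30)

if task.get_exit_code() is None:
    # None means the wait timed out while the task was still running,
    # as opposed to the task itself failing with a non-zero exit code.
    print('Import task did not complete within 30 seconds')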
219 changes: 219 additions & 0 deletions dirsrvtests/tests/suites/clu/dsconf_tasks_test.py
@@ -0,0 +1,219 @@
# --- BEGIN COPYRIGHT BLOCK ---
# Copyright (C) 2023 Red Hat, Inc.
# All rights reserved.
#
# License: GPL (version 3 or any later version).
# See LICENSE for details.
# --- END COPYRIGHT BLOCK ---
#
import logging
import pytest
import os
from lib389._constants import DEFAULT_SUFFIX
# from lib389.topologies import topology_m1 as topo
from lib389.topologies import topology_st as topo
from lib389.tasks import (ImportTask, ExportTask, BackupTask, RestoreTask, AutomemberRebuildMembershipTask,
                          AutomemberAbortRebuildTask, MemberUidFixupTask, MemberOfFixupTask, USNTombstoneCleanupTask,
                          DBCompactTask, EntryUUIDFixupTask, SchemaReloadTask, SyntaxValidateTask,
                          FixupLinkedAttributesTask)
from lib389.plugins import USNPlugin, POSIXWinsyncPlugin, LinkedAttributesPlugin, AutoMembershipPlugin, MemberOfPlugin
from lib389.dbgen import dbgen_users
from lib389.idm.user import UserAccount
from lib389.idm.group import Groups
from lib389.idm.posixgroup import PosixGroups  # only used by the commented-out Posix group fixup test below

log = logging.getLogger(__name__)


def test_task_timeout(topo):
"""All thath te timeoutsetting works for all "tasks"
:id: 6a6f5176-76bf-424d-bc10-d33bdfa529eb
:setup: Standalone Instance
:steps:
1. Test timeout for import task
2. Test timeout for export task
3. Test timeout for schema validate task
4. Test timeout for schema reload task
5. Test timeout for automember rebuild
6. Test timeout for automember abort
7. Test timeout for usn cleanup task
8. Test timeout for posix group fixup task
9. Test timeout for member UID fixup task
10. Test timeout for memberof fixup task
11. Test timeout for entryuuid fixup task
12. Test timeout for linked attrs fixup task
        13. Test timeout for db compact task
:expectedresults:
1. Task timed out
2. Task timed out
3. Task timed out
4. Task timed out
5. Task timed out
6. Task timed out
7. Task timed out
8. Task timed out
9. Task timed out
10. Task timed out
11. Task timed out
12. Task timed out
13. Task timed out
"""

    # inst = topo.ms['supplier1'] --> this leads to a deadlock when testing MemberOfFixupTask
inst = topo.standalone

# Enable plugins
plugins = [USNPlugin, POSIXWinsyncPlugin, LinkedAttributesPlugin, AutoMembershipPlugin, MemberOfPlugin]
for plugin in plugins:
plugin(inst).enable()
inst.restart()

# Test timeout for import task, first create LDIF
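    # (Pattern used throughout: wait with a short timeout that should expire
    # while the task is still running -- get_exit_code() is then None -- then
    # wait with timeout=0, i.e. forever, so the task finishes before the next
    # step starts.)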
import_ldif = inst.ldifdir + '/import_task_timeout.ldif'
dbgen_users(inst, 100000, import_ldif, DEFAULT_SUFFIX, parent="ou=people," + DEFAULT_SUFFIX, generic=True)

task = ImportTask(inst)
task.import_suffix_from_ldif(ldiffile=import_ldif, suffix=DEFAULT_SUFFIX)
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)

# Test timeout for export task
export_ldif = inst.ldifdir + '/export_task_timeout.ldif'
task = ExportTask(inst)
task.export_suffix_to_ldif(export_ldif, DEFAULT_SUFFIX)
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)

# Test timeout for schema validate task
task = SyntaxValidateTask(inst).create(properties={
'basedn': DEFAULT_SUFFIX,
'filter': "objectClass=*"
})
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)

# Test timeout for schema reload task (runs too fast)
"""
task = SchemaReloadTask(inst).create(properties={
'schemadir': inst.schemadir,
})
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)
"""

# Test timeout for automember rebuild
task = AutomemberRebuildMembershipTask(inst).create(properties={
'basedn': DEFAULT_SUFFIX,
'filter': "objectClass=*"
})
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)

# Test timeout for automember abort (runs too fast)
"""
AutomemberRebuildMembershipTask(inst).create(properties={
'basedn': DEFAULT_SUFFIX,
'filter': "objectClass=*"
})
task = AutomemberAbortRebuildTask(inst).create()
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)
"""

# Test timeout for usn cleanup task, first delete a bunch of users
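    # (with the USN plugin enabled, the deletes leave tombstones behind,
    # giving the cleanup task enough work to outlive the short timeout)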
for idx in range(1, 1001):
entry_idx = str(idx).zfill(6)
dn = f"uid=user{entry_idx},ou=people,{DEFAULT_SUFFIX}"
UserAccount(inst, dn=dn).delete()
task = USNTombstoneCleanupTask(inst).create(properties={
'suffix': DEFAULT_SUFFIX,
})
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)

# Test timeout for Posix Group fixup task (runs too fast)
"""
groups = PosixGroups(inst, DEFAULT_SUFFIX)
start_range = 10000
for idx in range(1, 10):
group_props = {
'cn': 'test_posix_group_' + str(idx),
'objectclass': ['posixGroup', 'groupofuniquenames'],
'gidNumber': str(idx)
}
group = groups.create(properties=group_props)
for user_idx in range(start_range, start_range + 1000):
entry_idx = str(user_idx).zfill(6)
dn = f"uid=user{entry_idx},ou=people,{DEFAULT_SUFFIX}"
group.add('memberuid', dn)
group.add('uniquemember', dn)
start_range += 1000
task = MemberUidFixupTask(inst).create(properties={
'basedn': DEFAULT_SUFFIX,
'filter': "objectClass=*"
})
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)
"""

# Test timeout for memberOf fixup task
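    # Give the fixup task enough work to outlive the short timeout:
    # a group with 1000 members.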
groups = Groups(inst, DEFAULT_SUFFIX)
group_props = {'cn': 'test_group'}
group = groups.create(properties=group_props)
for idx in range(5000, 6000):
entry_idx = str(idx).zfill(6)
dn = f"uid=user{entry_idx},ou=people,{DEFAULT_SUFFIX}"
group.add_member(dn)

task = MemberOfFixupTask(inst).create(properties={
'basedn': DEFAULT_SUFFIX,
'filter': "objectClass=*"
})
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)

# Test timeout for entryuuid fixup task
task = EntryUUIDFixupTask(inst).create(properties={
'basedn': DEFAULT_SUFFIX,
'filter': "objectClass=*"
})
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)

# test timeout for linked attrs fixup (runs too fast)
"""
task = FixupLinkedAttributesTask(inst).create(properties={
'basedn': DEFAULT_SUFFIX,
'filter': "objectClass=*"
})
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)
"""

    # Test timeout for db compact task (runs too fast)
"""
task = DBCompactTask(inst).create()
task.wait(timeout=.5, sleep_interval=.5)
assert task.get_exit_code() is None
task.wait(timeout=0)
"""


if __name__ == '__main__':
# Run isolated
# -s for DEBUG mode
CURRENT_FILE = os.path.realpath(__file__)
pytest.main(["-s", CURRENT_FILE])
1 change: 1 addition & 0 deletions dirsrvtests/tests/suites/clu/fixup_test.py
@@ -83,6 +83,7 @@ def test_posix_winsync_fixup(topology_st, set_log_file_and_ldif):
args = FakeArgs()
args.DN = DEFAULT_SUFFIX
args.filter = None
+    args.timeout = 0

log.info('Run Fixup task')
do_fixup(standalone, DEFAULT_SUFFIX, log, args)
1 change: 1 addition & 0 deletions dirsrvtests/tests/suites/import/import_warning_test.py
@@ -106,6 +106,7 @@ def test_import_warning(topology_st):
args.only_core = False
args.include_suffixes = 'dc=example,dc=com'
args.exclude_suffixes = None
+    args.timeout = 0

log.info('Import the LDIF file')
backend_import(standalone, DEFAULT_SUFFIX, topology_st.logcap.log, args)
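Both one-line test changes above exist because the CLI handlers now read args.timeout unconditionally, so any caller that builds the argument object by hand must define it. A minimal sketch, assuming FakeArgs lives in lib389.cli_base as these suites typically import it:

from lib389.cli_base import FakeArgs  # assumed import path

args = FakeArgs()
args.timeout = 0  # 0 = no timeout, preserving the old wait-forever behaviour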
4 changes: 2 additions & 2 deletions ldap/servers/plugins/syntaxes/validate_task.c
@@ -184,7 +184,7 @@ syntax_validate_task_thread(void *arg)
slapi_task_begin(task, 1);
slapi_task_log_notice(task, "Syntax validation task starting (arg: %s) ...\n",
td->filter_str);
-    slapi_log_err(SLAPI_LOG_ERR, SYNTAX_PLUGIN_SUBSYSTEM,
+    slapi_log_err(SLAPI_LOG_INFO, SYNTAX_PLUGIN_SUBSYSTEM,
"syntax_validate_task_thread - Starting (base: \"%s\", filter: \"%s\") ...\n",
td->dn, td->filter_str);

@@ -206,7 +206,7 @@ syntax_validate_task_thread(void *arg)
slapi_task_log_status(task, "Syntax validate task complete. Found %" PRIu64
" invalid entries.\n",
slapi_counter_get_value(td->invalid_entries));
-    slapi_log_err(SLAPI_LOG_ERR, SYNTAX_PLUGIN_SUBSYSTEM, "syntax_validate_task_thread - Complete."
+    slapi_log_err(SLAPI_LOG_INFO, SYNTAX_PLUGIN_SUBSYSTEM, "syntax_validate_task_thread - Complete."
" Found %" PRIu64 " invalid entries.\n",
slapi_counter_get_value(td->invalid_entries));
slapi_task_inc_progress(task);
33 changes: 25 additions & 8 deletions src/lib389/lib389/cli_conf/backend.py
@@ -1,5 +1,5 @@
# --- BEGIN COPYRIGHT BLOCK ---
-# Copyright (C) 2022 Red Hat, Inc.
+# Copyright (C) 2023 Red Hat, Inc.
# Copyright (C) 2019 William Brown <william@blackhats.net.au>
# All rights reserved.
#
@@ -256,7 +256,7 @@ def backend_import(inst, basedn, log, args):
task = mc.import_ldif(ldifs=args.ldifs, chunk_size=args.chunks_size, encrypted=args.encrypted,
gen_uniq_id=args.gen_uniq_id, only_core=args.only_core, include_suffixes=args.include_suffixes,
exclude_suffixes=args.exclude_suffixes)
-    task.wait(timeout=None)
+    task.wait(timeout=args.timeout)
result = task.get_exit_code()
warning = task.get_task_warn()

@@ -266,7 +266,10 @@ def backend_import(inst, basedn, log, args):
else:
log.info("The import task has finished successfully, with warning code {}, check the logs for more detail".format(warning))
else:
raise ValueError("Import task failed\n-------------------------\n{}".format(ensure_str(task.get_task_log())))
if result is None:
raise ValueError(f"Import task has not completed\n-------------------------\n{ensure_str(task.get_task_log())}")
else:
raise ValueError(f"Import task failed\n-------------------------\n{ensure_str(task.get_task_log())}")


def backend_export(inst, basedn, log, args):
@@ -288,13 +291,16 @@ def backend_export(inst, basedn, log, args):
encrypted=args.encrypted, min_base64=args.min_base64, no_dump_uniq_id=args.no_dump_uniq_id,
replication=args.replication, not_folded=args.not_folded, no_seq_num=args.no_seq_num,
include_suffixes=args.include_suffixes, exclude_suffixes=args.exclude_suffixes)
-    task.wait(timeout=None)
+    task.wait(timeout=args.timeout)
result = task.get_exit_code()

if task.is_complete() and result == 0:
log.info("The export task has finished successfully")
else:
raise ValueError("Export task failed\n-------------------------\n{}".format(ensure_str(task.get_task_log())))
if result is None:
raise ValueError(f"Export task did not complete\n-------------------------\n{ensure_str(task.get_task_log())}")
else:
raise ValueError(f"Export task failed\n-------------------------\n{ensure_str(task.get_task_log())}")


def is_db_link(inst, rdn):
@@ -815,9 +821,14 @@ def backend_compact(inst, basedn, log, args):
if args.only_changelog:
task_properties = {'justChangelog': 'yes'}
task.create(properties=task_properties)
-    task.wait()
-    if task.get_exit_code() != 0:
-        raise ValueError("Failed to create Database Compaction Task")
+    task.wait(timeout=args.timeout)
+    result = task.get_exit_code()
+    if result != 0:
+        if result is None:
+            raise ValueError("Database Compaction Task has not completed")
+        else:
+            raise ValueError(f"Database Compaction Task failed, error: {result}")

log.info("Successfully started Database Compaction Task")


@@ -1103,6 +1114,8 @@ def create_parser(subparsers):
help="Specifies the suffixes or the subtrees to be included")
import_parser.add_argument('-x', '--exclude-suffixes', nargs='+',
help="Specifies the suffixes to be excluded")
+    import_parser.add_argument('--timeout', type=int, default=0,
+                               help="Set a timeout to wait for the import task. Default is 0 (no timeout)")

#######################################################
# Export LDIF
@@ -1132,6 +1145,8 @@ def create_parser(subparsers):
help="Specifies the suffixes or the subtrees to be included")
export_parser.add_argument('-x', '--exclude-suffixes', nargs='+',
help="Specifies the suffixes to be excluded")
+    export_parser.add_argument('--timeout', default=0, type=int,
+                               help="Set a timeout to wait for the export task. Default is 0 (no timeout)")

#######################################################
# Create a new backend database
@@ -1165,3 +1180,5 @@ def create_parser(subparsers):
compact_parser = subcommands.add_parser('compact-db', help='Compact the database and the replication changelog')
compact_parser.set_defaults(func=backend_compact)
compact_parser.add_argument('--only-changelog', action='store_true', help='Compacts only the replication change log')
+    compact_parser.add_argument('--timeout', default=0, type=int,
+                                help="Set a timeout to wait for the compaction task. Default is 0 (no timeout)")
