diff --git a/codecov.yml b/codecov.yml index 9c67c4c4a8..ff342d5aa9 100644 --- a/codecov.yml +++ b/codecov.yml @@ -8,7 +8,7 @@ coverage: threshold: 0.35% codecov: notify: - after_n_builds: 29 + after_n_builds: 30 comment: - after_n_builds: 29 + after_n_builds: 30 layout: "condensed_header, flags, files, condensed_footer" diff --git a/crmsh/bootstrap.py b/crmsh/bootstrap.py index 4b23ecf5dd..64ff27f24d 100644 --- a/crmsh/bootstrap.py +++ b/crmsh/bootstrap.py @@ -10,6 +10,7 @@ # simplicity and flexibility. # import codecs +import dataclasses import io import os import subprocess @@ -26,7 +27,7 @@ from string import Template from lxml import etree -from . import config, constants, ssh_key, sh +from . import config, constants, ssh_key, sh, cibquery, user_of_host from . import utils from . import xmlutil from .cibconfig import cib_factory @@ -266,7 +267,7 @@ def _validate_cluster_node(self): # self.cluster_node might be hostname or IP address ip_addr = socket.gethostbyname(node) if utils.InterfacesInfo.ip_in_local(ip_addr): - utils.fatal("Please specify peer node's hostname or IP address") + utils.fatal(f"\"{node}\" is the local node. Please specify peer node's hostname or IP address") except socket.gaierror as err: utils.fatal(f"\"{node}\": {err}") @@ -630,11 +631,6 @@ def check_prereqs(): logger.warning("{} is not configured to start at system boot.".format(timekeeper)) warned = True - if _context.use_ssh_agent == False and 'SSH_AUTH_SOCK' in os.environ: - msg = "$SSH_AUTH_SOCK is detected. As a tip, using the --use-ssh-agent option could avoid generate local root ssh keys on cluster nodes." 
- logger.warning(msg) - warned = True - if warned: if not confirm("Do you want to continue anyway?"): return False @@ -824,11 +820,10 @@ def _parse_user_at_host(s: str, default_user: str) -> typing.Tuple[str, str]: def _keys_from_ssh_agent() -> typing.List[ssh_key.Key]: try: keys = ssh_key.AgentClient().list() - logger.info("Using public keys from ssh-agent...") + return keys except ssh_key.Error: - logger.error("Cannot get a public key from ssh-agent.") - raise - return keys + logger.debug("Cannot get a public key from ssh-agent.", exc_info=True) + return list() def init_ssh(): @@ -861,6 +856,7 @@ def init_ssh_impl(local_user: str, ssh_public_keys: typing.List[ssh_key.Key], us logger.info("Adding public keys to authorized_keys for user %s...", local_user) for key in ssh_public_keys: authorized_key_manager.add(None, local_user, key) + logger.info("Added public key %s.", key.fingerprint()) else: configure_ssh_key(local_user) configure_ssh_key('hacluster') @@ -868,17 +864,9 @@ def init_ssh_impl(local_user: str, ssh_public_keys: typing.List[ssh_key.Key], us user_by_host = utils.HostUserConfig() user_by_host.clear() - user_by_host.set_no_generating_ssh_key(bool(ssh_public_keys)) user_by_host.save_local() if user_node_list: - print() - if ssh_public_keys: - for user, node in user_node_list: - logger.info("Adding public keys to authorized_keys on %s@%s", user, node) - for key in ssh_public_keys: - authorized_key_manager.add(node, local_user, key) - else: - _init_ssh_on_remote_nodes(local_user, user_node_list) + _init_ssh_on_remote_nodes(local_shell, local_user, user_node_list) for user, node in user_node_list: if user != 'root' and 0 != shell.subprocess_run_without_input( node, user, 'sudo true', @@ -904,28 +892,42 @@ def init_ssh_impl(local_user: str, ssh_public_keys: typing.List[ssh_key.Key], us def _init_ssh_on_remote_nodes( + local_shell: sh.LocalShell, local_user: str, user_node_list: typing.List[typing.Tuple[str, str]], ): # Swap public ssh key between remote node 
and local + ssh_shell = sh.SSHShell(local_shell, local_user) + authorized_key_manager = ssh_key.AuthorizedKeyManager(ssh_shell) public_key_list = list() - for i, (remote_user, node) in enumerate(user_node_list): - utils.ssh_copy_id(local_user, remote_user, node) - # After this, login to remote_node is passwordless - public_key_list.append(swap_public_ssh_key(node, local_user, remote_user, local_user, remote_user)) - if len(user_node_list) > 1: - shell = sh.LocalShell() - shell_script = _merge_line_into_file('~/.ssh/authorized_keys', public_key_list).encode('utf-8') - for i, (remote_user, node) in enumerate(user_node_list): - result = shell.su_subprocess_run( - local_user, - 'ssh {} {}@{} /bin/sh'.format(constants.SSH_OPTION, remote_user, node), - input=shell_script, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, + for user, node in user_node_list: + logger.info("Adding public keys to authorized_keys on %s@%s", user, node) + result = ssh_copy_id_no_raise(local_user, user, node, local_shell) + if result.returncode != 0: + utils.fatal("Failed to login to remote host {}@{}".format(user, node)) + elif not result.public_keys: + pass + elif isinstance(result.public_keys[0], ssh_key.KeyFile): + public_key = ssh_key.InMemoryPublicKey( + generate_ssh_key_pair_on_remote(local_shell, local_user, node, user, user), ) - if result.returncode != 0: - utils.fatal('Failed to add public keys to {}@{}: {}'.format(remote_user, node, result.stdout)) + public_key_list.append(public_key) + authorized_key_manager.add(node, user, public_key) + authorized_key_manager.add(None, local_user, public_key) + shell_script = _merge_line_into_file( + '~/.ssh/authorized_keys', + (key.public_key() for key in public_key_list), + ).encode('utf-8') + for i, (remote_user, node) in enumerate(user_node_list): + result = local_shell.su_subprocess_run( + local_user, + 'ssh {} {}@{} /bin/sh'.format(constants.SSH_OPTION, remote_user, node), + input=shell_script, + stdout=subprocess.PIPE, + 
stderr=subprocess.STDOUT, + ) + if result.returncode != 0: + utils.fatal('Failed to add public keys to {}@{}: {}'.format(remote_user, node, result.stdout)) def _init_ssh_for_secondary_user_on_remote_nodes( @@ -950,7 +952,8 @@ def _init_ssh_for_secondary_user_on_remote_nodes( def _merge_line_into_file(path: str, lines: typing.Iterable[str]) -> str: - shell_script = '''[ -e "$path" ] || echo '# created by crmsh' > "$path" + shell_script = '''set -e +[ -e "$path" ] || echo '# created by crmsh' > "$path" for key in "${keys[@]}"; do grep -F "$key" "$path" > /dev/null || sed -i "\\$a $key" "$path" done''' @@ -1026,18 +1029,45 @@ def configure_ssh_key(user): if is_generated: logger.info("A new ssh keypair is generated for user %s.", user) authorized_key_manager.add(None, user, keys[0]) + logger.info("A public key is added to authorized_keys for user %s: %s", user, keys[0].fingerprint()) + + +@dataclasses.dataclass(frozen=True) +class SshCopyIdResult: + returncode: int + public_keys: list[ssh_key.Key] + + +def ssh_copy_id_no_raise(local_user, remote_user, remote_node, shell: sh.LocalShell = None) -> SshCopyIdResult: + if shell is None: + shell = sh.LocalShell() + if utils.check_ssh_passwd_need(local_user, remote_user, remote_node, shell): + configure_ssh_key(local_user) + public_keys = ssh_key.fetch_public_key_file_list(None, local_user) + logger.info("Configuring SSH passwordless with {}@{}".format(remote_user, remote_node)) + cmd = f"ssh-copy-id -i {public_keys[0].public_key_file()} '{remote_user}@{remote_node}' &> /dev/null" + result = shell.su_subprocess_run(local_user, cmd, tty=True) + return SshCopyIdResult(result.returncode, public_keys) + else: + return SshCopyIdResult(0, list()) + + +def ssh_copy_id(local_user, remote_user, remote_node): + if 0 != ssh_copy_id_no_raise(local_user, remote_user, remote_node).returncode: + utils.fatal("Failed to login to remote host {}@{}".format(remote_user, remote_node)) def generate_ssh_key_pair_on_remote( + shell: sh.LocalShell, 
local_sudoer: str, remote_host: str, remote_sudoer: str, - remote_user: str + remote_user: str, ) -> str: """generate a key pair on remote and return the public key""" - shell = sh.LocalShell() # pass cmd through stdin rather than as arguments. It seems sudo has its own argument parsing mechanics, # which breaks shell expansion used in cmd generate_key_script = f''' +set -e key_types=({ ' '.join(ssh_key.KeyFileManager.KNOWN_KEY_TYPES) }) for key_type in "${{key_types[@]}}"; do priv_key_file=~/.ssh/id_${{key_type}} @@ -1089,7 +1119,11 @@ def generate_ssh_key_pair_on_remote( return result.stdout.decode('utf-8').strip() -def export_ssh_key_non_interactive(local_user_to_export, remote_user_to_swap, remote_node, local_sudoer, remote_sudoer): +def export_ssh_key_non_interactive( + shell: sh.LocalShell, + local_user_to_export, remote_user_to_swap, + remote_node, local_sudoer, remote_sudoer, +): """Copy ssh key from local to remote's authorized_keys. Require a configured non-interactive ssh authentication.""" # ssh-copy-id will prompt for the password of the destination user # this is unwanted, so we write to the authorised_keys file ourselve @@ -1099,7 +1133,7 @@ def export_ssh_key_non_interactive(local_user_to_export, remote_user_to_swap, re {key} EOF '''.format(user=remote_user_to_swap, key=public_key) - result = sh.LocalShell().su_subprocess_run( + result = shell.su_subprocess_run( local_sudoer, 'ssh {} {}@{} sudo /bin/sh'.format(constants.SSH_OPTION, remote_sudoer, remote_node), input=cmd.encode('utf-8'), @@ -1545,39 +1579,31 @@ def configure_qdevice_interactive(): def _setup_passwordless_ssh_for_qnetd(cluster_node_list: typing.List[str]): local_user, qnetd_user, qnetd_addr = _select_user_pair_for_ssh_for_secondary_components(_context.qnetd_addr_input) # Configure ssh passwordless to qnetd if detect password is needed - if UserOfHost.instance().use_ssh_agent(): - logger.info("Adding public keys to authorized_keys for user root...") - for key in 
ssh_key.AgentClient().list(): - ssh_key.AuthorizedKeyManager(sh.SSHShell( - sh.LocalShell(additional_environ={'SSH_AUTH_SOCK': os.environ.get('SSH_AUTH_SOCK')}), - 'root', - )).add(qnetd_addr, qnetd_user, key) - else: - if 0 != utils.ssh_copy_id_no_raise( - local_user, qnetd_user, qnetd_addr, - sh.LocalShell(additional_environ={'SSH_AUTH_SOCK': ''}), - ): - msg = f"Failed to login to {qnetd_user}@{qnetd_addr}. Please check the credentials." - sudoer = userdir.get_sudoer() - if sudoer and qnetd_user != sudoer: - args = ['sudo crm'] - args += [x for x in sys.argv[1:]] - for i, arg in enumerate(args): - if arg == '--qnetd-hostname' and i + 1 < len(args): - if '@' not in args[i + 1]: - args[i + 1] = f'{sudoer}@{qnetd_addr}' - msg += '\nOr, run "{}".'.format(' '.join(args)) - raise ValueError(msg) - - cluster_shell = sh.cluster_shell() - # Add other nodes' public keys to qnetd's authorized_keys - for node in cluster_node_list: - if node == utils.this_node(): - continue - local_user, remote_user, node = _select_user_pair_for_ssh_for_secondary_components(node) - remote_key_content = ssh_key.fetch_public_key_content_list(node, remote_user)[0] - in_memory_key = ssh_key.InMemoryPublicKey(remote_key_content) - ssh_key.AuthorizedKeyManager(cluster_shell).add(qnetd_addr, qnetd_user, in_memory_key) + if 0 != ssh_copy_id_no_raise( + local_user, qnetd_user, qnetd_addr, + sh.LocalShell(additional_environ={'SSH_AUTH_SOCK': os.environ.get('SSH_AUTH_SOCK', '')}), + ).returncode: + msg = f"Failed to login to {qnetd_user}@{qnetd_addr}. Please check the credentials." 
+ sudoer = userdir.get_sudoer() + if sudoer and qnetd_user != sudoer: + args = ['sudo crm'] + args += [x for x in sys.argv[1:]] + for i, arg in enumerate(args): + if arg == '--qnetd-hostname' and i + 1 < len(args): + if '@' not in args[i + 1]: + args[i + 1] = f'{sudoer}@{qnetd_addr}' + msg += '\nOr, run "{}".'.format(' '.join(args)) + raise ValueError(msg) + + cluster_shell = sh.cluster_shell() + # Add other nodes' public keys to qnetd's authorized_keys + for node in cluster_node_list: + if node == utils.this_node(): + continue + local_user, remote_user, node = _select_user_pair_for_ssh_for_secondary_components(node) + remote_key_content = ssh_key.fetch_public_key_content_list(node, remote_user)[0] + in_memory_key = ssh_key.InMemoryPublicKey(remote_key_content) + ssh_key.AuthorizedKeyManager(cluster_shell).add(qnetd_addr, qnetd_user, in_memory_key) user_by_host = utils.HostUserConfig() user_by_host.add(local_user, utils.this_node()) @@ -1639,25 +1665,36 @@ def join_ssh_impl(local_user, seed_host, seed_user, ssh_public_keys: typing.List ServiceManager(sh.ClusterShellAdaptorForLocalShell(sh.LocalShell())).start_service("sshd.service", enable=True) if ssh_public_keys: local_shell = sh.LocalShell(additional_environ={'SSH_AUTH_SOCK': os.environ.get('SSH_AUTH_SOCK')}) - join_ssh_with_ssh_agent(local_shell, local_user, seed_host, seed_user, ssh_public_keys) else: local_shell = sh.LocalShell(additional_environ={'SSH_AUTH_SOCK': ''}) - configure_ssh_key(local_user) - if 0 != utils.ssh_copy_id_no_raise(local_user, seed_user, seed_host, local_shell): - msg = f"Failed to login to {seed_user}@{seed_host}. Please check the credentials." 
- sudoer = userdir.get_sudoer() - if sudoer and seed_user != sudoer: - args = ['sudo crm'] - args += [x for x in sys.argv[1:]] - for i, arg in enumerate(args): - if arg == '-c' or arg == '--cluster-node' and i + 1 < len(args): - if '@' not in args[i+1]: - args[i + 1] = f'{sudoer}@{seed_host}' - msg += '\nOr, run "{}".'.format(' '.join(args)) - raise ValueError(msg) - # After this, login to remote_node is passwordless - swap_public_ssh_key(seed_host, local_user, seed_user, local_user, seed_user) + result = ssh_copy_id_no_raise(local_user, seed_user, seed_host, local_shell) + if 0 != result.returncode: + msg = f"Failed to login to {seed_user}@{seed_host}. Please check the credentials." + sudoer = userdir.get_sudoer() + if sudoer and seed_user != sudoer: + args = ['sudo crm'] + args += [x for x in sys.argv[1:]] + for i, arg in enumerate(args): + if arg == '-c' or arg == '--cluster-node' and i + 1 < len(args): + if '@' not in args[i+1]: + args[i + 1] = f'{sudoer}@{seed_host}' + msg += '\nOr, run "{}".'.format(' '.join(args)) + raise ValueError(msg) + # From here, login to remote_node is passwordless ssh_shell = sh.SSHShell(local_shell, local_user) + authorized_key_manager = ssh_key.AuthorizedKeyManager(ssh_shell) + if not result.public_keys: + pass + elif isinstance(result.public_keys[0], ssh_key.KeyFile): + public_key = ssh_key.InMemoryPublicKey( + generate_ssh_key_pair_on_remote(local_shell, local_user, seed_host, seed_user, seed_user), + ) + authorized_key_manager.add( None, local_user, public_key) + logger.info('A public key is added to authorized_keys for user %s: %s', local_user, public_key.fingerprint()) + elif isinstance(result.public_keys[0], ssh_key.InMemoryPublicKey): + authorized_key_manager.add(None, local_user, result.public_keys[0]) + logger.info('A public key is added to authorized_keys for user %s: %s', local_user, result.public_keys[0].fingerprint()) + # else is not None do nothing if seed_user != 'root' and 0 != 
ssh_shell.subprocess_run_without_input( seed_host, seed_user, 'sudo true', stdout=subprocess.DEVNULL, @@ -1669,7 +1706,6 @@ def join_ssh_impl(local_user, seed_host, seed_user, ssh_public_keys: typing.List user_by_host.clear() user_by_host.add(seed_user, seed_host) user_by_host.add(local_user, utils.this_node()) - user_by_host.set_no_generating_ssh_key(bool(ssh_public_keys)) user_by_host.save_local() detect_cluster_service_on_node(seed_host) user_by_host.add(seed_user, get_node_canonical_hostname(seed_host)) @@ -1680,7 +1716,7 @@ def join_ssh_impl(local_user, seed_host, seed_user, ssh_public_keys: typing.List swap_public_ssh_key_for_secondary_user(sh.cluster_shell(), seed_host, 'hacluster') if _context.stage: - setup_passwordless_with_other_nodes(seed_host, seed_user) + setup_passwordless_with_other_nodes(seed_host) def join_ssh_with_ssh_agent( @@ -1714,16 +1750,26 @@ def swap_public_ssh_key( local_user_to_swap, remote_user_to_swap, local_sudoer, - remote_sudoer + remote_sudoer, + local_shell: sh.LocalShell = None, # FIXME: should not have default value ): """ Swap public ssh key between remote_node and local """ + if local_shell is None: + local_shell = sh.LocalShell() # Detect whether need password to login to remote_node - if utils.check_ssh_passwd_need(local_user_to_swap, remote_user_to_swap, remote_node): - export_ssh_key_non_interactive(local_user_to_swap, remote_user_to_swap, remote_node, local_sudoer, remote_sudoer) + if utils.check_ssh_passwd_need(local_user_to_swap, remote_user_to_swap, remote_node, local_shell): + export_ssh_key_non_interactive( + local_shell, + local_user_to_swap, remote_user_to_swap, + remote_node, local_sudoer, remote_sudoer, + ) - public_key = generate_ssh_key_pair_on_remote(local_sudoer, remote_node, remote_sudoer, remote_user_to_swap) + public_key = generate_ssh_key_pair_on_remote( + local_shell, + local_sudoer, remote_node, remote_sudoer, remote_user_to_swap, + ) ssh_key.AuthorizedKeyManager(sh.SSHShell(sh.LocalShell(), 
local_user_to_swap)).add( None, local_user_to_swap, ssh_key.InMemoryPublicKey(public_key), ) @@ -1807,7 +1853,7 @@ def join_ssh_merge(cluster_node, remote_user): shell.get_stdout_or_raise_error(script, host) -def setup_passwordless_with_other_nodes(init_node, remote_user): +def setup_passwordless_with_other_nodes(init_node): """ Setup passwordless with other cluster nodes @@ -1815,27 +1861,14 @@ def setup_passwordless_with_other_nodes(init_node, remote_user): """ # Fetch cluster nodes list local_user = _context.current_user - shell = sh.cluster_shell() - rc, out, err = shell.get_rc_stdout_stderr_without_input(init_node, 'crm_node -l') + local_shell = sh.LocalShell( + additional_environ={'SSH_AUTH_SOCK': os.environ.get('SSH_AUTH_SOCK', '') if _context.use_ssh_agent else ''}, + ) + shell = sh.ClusterShell(local_shell, user_of_host.UserOfHost.instance(), _context.use_ssh_agent, True) + rc, out, err = shell.get_rc_stdout_stderr_without_input(init_node, constants.CIB_QUERY) if rc != 0: utils.fatal("Can't fetch cluster nodes list from {}: {}".format(init_node, err)) - cluster_nodes_list = [] - for line in out.splitlines(): - # Parse line in format: , and collect the - # nodename. - tokens = line.split() - if len(tokens) == 0: - pass # Skip any spurious empty line. - elif len(tokens) < 3: - logger.warning("Unable to configure passwordless ssh with nodeid {}. The " - "node has no known name and/or state information".format( - tokens[0])) - elif tokens[2] != "member": - logger.warning("Skipping configuration of passwordless ssh with node {} in " - "state '{}'. 
The node is not a current member".format( - tokens[1], tokens[2])) - else: - cluster_nodes_list.append(tokens[1]) + cluster_node_list = [x.uname for x in cibquery.get_cluster_nodes(etree.fromstring(out))] user_by_host = utils.HostUserConfig() user_by_host.add(local_user, utils.this_node()) try: @@ -1851,22 +1884,34 @@ def setup_passwordless_with_other_nodes(init_node, remote_user): rc, out, err = shell.get_rc_stdout_stderr_without_input(init_node, 'hostname') if rc != 0: utils.fatal("Can't fetch hostname of {}: {}".format(init_node, err)) + init_node_hostname = out # Swap ssh public key between join node and other cluster nodes - if not _context.use_ssh_agent: - for node in (node for node in cluster_nodes_list if node != out): - remote_user_to_swap = utils.user_of(node) - remote_privileged_user = remote_user_to_swap - utils.ssh_copy_id(local_user, remote_privileged_user, node) - swap_public_ssh_key(node, local_user, remote_user_to_swap, local_user, remote_privileged_user) - if local_user != 'hacluster': - change_user_shell('hacluster', node) - swap_public_ssh_key(node, 'hacluster', 'hacluster', local_user, remote_privileged_user) + for node in (node for node in cluster_node_list if node != init_node_hostname): + remote_user_to_swap = utils.user_of(node) + remote_privileged_user = remote_user_to_swap + result = ssh_copy_id_no_raise(local_user, remote_privileged_user, node, local_shell) + if result.returncode != 0: + utils.fatal("Failed to login to remote host {}@{}".format(remote_user_to_swap, node)) + _merge_ssh_authorized_keys(cluster_node_list) if local_user != 'hacluster': - swap_key_for_hacluster(cluster_nodes_list) - else: - swap_key_for_hacluster(cluster_nodes_list) + change_user_shell('hacluster', node) + swap_public_ssh_key(node, 'hacluster', 'hacluster', local_user, remote_privileged_user, local_shell) + if local_user != 'hacluster': + swap_key_for_hacluster(cluster_node_list) - user_by_host.save_remote(cluster_nodes_list) + 
user_by_host.save_remote(cluster_node_list) + + +def _merge_ssh_authorized_keys(nodes: typing.Sequence[str]): + keys = set() + with tempfile.TemporaryDirectory(prefix='crmsh-bootstrap-') as tmpdir: + # sftp does not accept `~` + for host, file in parallax.parallax_slurp(nodes, tmpdir, '.ssh/authorized_keys'): + with open(file, 'r', encoding='utf-8') as f: + for line in f: + if line.startswith('ssh-'): + keys.add(line.rstrip()) + parallax.parallax_run(nodes, _merge_line_into_file('~/.ssh/authorized_keys', keys)) def swap_key_for_hacluster(other_node_list): @@ -2261,10 +2306,14 @@ def bootstrap_add(context): options += '-i {} '.format(nic) options = " {}".format(options.strip()) if options else "" - if context.use_ssh_agent: - options += ' --use-ssh-agent' + if not context.use_ssh_agent: + options += ' --no-use-ssh-agent' - shell = sh.ClusterShell(sh.LocalShell(), UserOfHost.instance(), _context.use_ssh_agent) + shell = sh.ClusterShell( + sh.LocalShell({'SSH_AUTH_SOCK': os.environ.get('SSH_AUTH_SOCK', '') if _context.use_ssh_agent else ''}), + UserOfHost.instance(), + _context.use_ssh_agent, + ) for (user, node) in (_parse_user_at_host(x, _context.current_user) for x in _context.user_at_node_list): print() logger.info("Adding node {} to cluster".format(node)) @@ -2327,7 +2376,7 @@ def bootstrap_join(context): with lock_inst.lock(): service_manager = ServiceManager() _context.node_list_in_cluster = utils.fetch_cluster_node_list_from_node(cluster_node) - setup_passwordless_with_other_nodes(cluster_node, remote_user) + setup_passwordless_with_other_nodes(cluster_node) _context.skip_csync2 = not service_manager.service_is_active(CSYNC2_SERVICE, cluster_node) if _context.skip_csync2: service_manager.stop_service(CSYNC2_SERVICE, disable=True) @@ -2635,22 +2684,15 @@ def bootstrap_join_geo(context): user, node = utils.parse_user_at_host(_context.cluster_node) if not sh.cluster_shell().can_run_as(node, 'root'): local_user, remote_user, node = 
_select_user_pair_for_ssh_for_secondary_components(_context.cluster_node) - if context.use_ssh_agent: - keys = _keys_from_ssh_agent() - local_shell = sh.LocalShell(additional_environ={'SSH_AUTH_SOCK': os.environ.get('SSH_AUTH_SOCK')}) - join_ssh_with_ssh_agent(local_shell, local_user, node, remote_user, keys) - else: - configure_ssh_key(local_user) - if 0 != utils.ssh_copy_id_no_raise( - local_user, remote_user, node, - sh.LocalShell(additional_environ={'SSH_AUTH_SOCK': ''}), - ): - raise ValueError(f"Failed to login to {remote_user}@{node}. Please check the credentials.") - swap_public_ssh_key(node, local_user, remote_user, local_user, remote_user) + local_shell = sh.LocalShell(additional_environ={ + 'SSH_AUTH_SOCK': os.environ.get('SSH_AUTH_SOCK', '') if _context.use_ssh_agent else '', + }) + result = ssh_copy_id_no_raise(local_user, remote_user, node, local_shell) + if 0 != result.returncode: + raise ValueError(f"Failed to login to {remote_user}@{node}. Please check the credentials.") user_by_host = utils.HostUserConfig() user_by_host.add(local_user, utils.this_node()) user_by_host.add(remote_user, node) - user_by_host.set_no_generating_ssh_key(context.use_ssh_agent) user_by_host.save_local() geo_fetch_config(node) logger.info("Sync booth configuration across cluster") @@ -2674,21 +2716,14 @@ def bootstrap_arbitrator(context): user_by_host.save_local() if not sh.cluster_shell().can_run_as(node, 'root'): local_user, remote_user, node = _select_user_pair_for_ssh_for_secondary_components(_context.cluster_node) - if context.use_ssh_agent: - keys = _keys_from_ssh_agent() - local_shell = sh.LocalShell(additional_environ={'SSH_AUTH_SOCK': os.environ.get('SSH_AUTH_SOCK')}) - join_ssh_with_ssh_agent(local_shell, local_user, node, remote_user, keys) - else: - configure_ssh_key(local_user) - if 0 != utils.ssh_copy_id_no_raise( - local_user, remote_user, node, - sh.LocalShell(additional_environ={'SSH_AUTH_SOCK': ''}), - ): - raise ValueError(f"Failed to login to 
{remote_user}@{node}. Please check the credentials.") - swap_public_ssh_key(node, local_user, remote_user, local_user, remote_user) + local_shell = sh.LocalShell(additional_environ={ + 'SSH_AUTH_SOCK': os.environ.get('SSH_AUTH_SOCK', '') if _context.use_ssh_agent else '', + }) + result = ssh_copy_id_no_raise(local_user, remote_user, node, local_shell) + if 0 != result.returncode: + raise ValueError(f"Failed to login to {remote_user}@{node}. Please check the credentials.") user_by_host.add(local_user, utils.this_node()) user_by_host.add(remote_user, node) - user_by_host.set_no_generating_ssh_key(context.use_ssh_agent) user_by_host.save_local() geo_fetch_config(node) if not os.path.isfile(BOOTH_CFG): diff --git a/crmsh/config.py b/crmsh/config.py index 34a64d1e67..70cae670c1 100644 --- a/crmsh/config.py +++ b/crmsh/config.py @@ -240,7 +240,7 @@ def get(self, value): 'pager': opt_program('PAGER', ('less', 'more', 'pg')), 'user': opt_string(''), 'hosts': opt_list([]), # 'alice@host1, bob@host2' - 'no_generating_ssh_key': opt_boolean('no'), + 'no_generating_ssh_key': opt_boolean('no'), # deprecated 'skill_level': opt_choice('expert', ('operator', 'administrator', 'expert')), 'sort_elements': opt_boolean('yes'), 'check_frequency': opt_choice('always', ('always', 'on-verify', 'never')), diff --git a/crmsh/prun/prun.py b/crmsh/prun/prun.py index 23c6b91061..25b35ec00f 100644 --- a/crmsh/prun/prun.py +++ b/crmsh/prun/prun.py @@ -12,7 +12,7 @@ _DEFAULT_CONCURRENCY = 32 -_SUDO_SFTP_SERVER = 'sudo PATH=/usr/lib/ssh:/usr/lib/openssh:/usr/libexec/ssh:/usr/libexec/openssh /bin/sh -c "exec sftp-server"' +_SUDO_SFTP_SERVER = 'sudo --preserve-env=SSH_AUTH_SOCK PATH=/usr/lib/ssh:/usr/lib/openssh:/usr/libexec/ssh:/usr/libexec/openssh /bin/sh -c "exec sftp-server"' class ProcessResult: @@ -117,11 +117,11 @@ def _build_run_task(remote: str, cmdline: str) -> Task: ) else: local_sudoer, remote_sudoer = UserOfHost.instance().user_pair_for_ssh(remote) - shell = 'ssh {} {}@{} sudo -H 
/bin/sh'.format(crmsh.constants.SSH_OPTION, remote_sudoer, remote) + shell = 'ssh -A {} {}@{} sudo -H /bin/sh'.format(crmsh.constants.SSH_OPTION, remote_sudoer, remote) if local_sudoer == crmsh.userdir.getuser(): args = ['/bin/sh', '-c', shell] elif os.geteuid() == 0: - args = ['su', local_sudoer, '--login', '-c', shell] + args = ['su', local_sudoer, '--login', '-c', shell, '-w', 'SSH_AUTH_SOCK'] else: raise AssertionError('trying to run su as a non-root user') return Task( @@ -171,7 +171,7 @@ def pcopy_to_remote( ssh = tempfile.NamedTemporaryFile('w', encoding='utf-8', delete=False) os.fchmod(ssh.fileno(), 0o700) ssh.write(f'''#!/bin/sh -exec sudo -u {local_sudoer} ssh "$@"''') +exec sudo --preserve-env=SSH_AUTH_SOCK -u {local_sudoer} ssh "$@"''') # It is necessary to close the file before executing, or we will get an EBUSY. ssh.close() tasks = [_build_copy_task("-S '{}'".format(ssh.name), script, host) for host in hosts] @@ -233,7 +233,7 @@ def pfetch_from_remote( ssh = tempfile.NamedTemporaryFile('w', encoding='utf-8', delete=False) os.fchmod(ssh.fileno(), 0o700) ssh.write(f'''#!/bin/sh - exec sudo -u {local_sudoer} ssh "$@"''') + exec sudo --preserve-env=SSH_AUTH_SOCK -u {local_sudoer} ssh "$@"''') # It is necessary to close the file before executing ssh.close() tasks = [_build_fetch_task("-S '{}'".format(ssh.name), host, src, dst, flags) for host in hosts] diff --git a/crmsh/prun/runner.py b/crmsh/prun/runner.py index 12682f4f7a..92dc99acbc 100644 --- a/crmsh/prun/runner.py +++ b/crmsh/prun/runner.py @@ -37,6 +37,9 @@ def __init__( # Caller can pass arbitrary data to context, it is kept untouched. 
self.context = context + def __repr__(self): + return f"Task({self.args}, {self.input}, {self.stdout_config}, {self.stderr_config}, {self.context})" + class Runner: def __init__(self, concurrency): diff --git a/crmsh/report/core.py b/crmsh/report/core.py index 5a9470173f..f461d97091 100644 --- a/crmsh/report/core.py +++ b/crmsh/report/core.py @@ -406,12 +406,6 @@ def find_ssh_user(context: Context) -> None: if ret is not None: logger.debug("passwordless ssh to %s is OK", n) context.passwordless_shell_for_nodes[n] = ret - elif user_of_host.use_ssh_agent() and 'SSH_AUTH_SOCK' not in os.environ: - with StringIO() as buf: - buf.write('Environment variable SSH_AUTH_SOCK does not exist.') - if 'SUDO_USER' in os.environ: - buf.write(' Please check whether ssh-agent is available and consider using "sudo --preserve-env=SSH_AUTH_SOCK".') - logger.warning('%s', buf.getvalue()) else: logger.warning("passwordless ssh to node %s does not work", n) if not crmutils.can_ask(): diff --git a/crmsh/sh.py b/crmsh/sh.py index 006e964ae2..30c4b3d1a1 100644 --- a/crmsh/sh.py +++ b/crmsh/sh.py @@ -57,13 +57,7 @@ def __init__(self, cmd: str, host: typing.Optional[str], user: str, msg: str): self.user = user def diagnose(self) -> str: - with StringIO() as buf: - if user_of_host.instance().use_ssh_agent(): - if 'SSH_AUTH_SOCK' not in os.environ: - buf.write('Environment variable SSH_AUTH_SOCK does not exist.') - if 'SUDO_USER' in os.environ: - buf.write(' Please check whether ssh-agent is available and consider using "sudo --preserve-env=SSH_AUTH_SOCK".') - return buf.getvalue() + return '' class NonInteractiveSSHAuthorizationError(AuthorizationError): @@ -511,5 +505,10 @@ def subprocess_run_without_input(self, host: str, user: typing.Optional[str], cm def cluster_shell(): - return ClusterShell(LocalShell(), user_of_host.instance(), raise_ssh_error=True) + return ClusterShell( + LocalShell(additional_environ={'SSH_AUTH_SOCK': os.environ.get('SSH_AUTH_SOCK', '')}), + user_of_host.instance(), + 
forward_ssh_agent=True, + raise_ssh_error=True, + ) diff --git a/crmsh/ssh_key.py b/crmsh/ssh_key.py index d23c59ef37..21e9f24011 100644 --- a/crmsh/ssh_key.py +++ b/crmsh/ssh_key.py @@ -48,11 +48,15 @@ class Key: def public_key(self) -> str: raise NotImplementedError + def fingerprint(self) -> str: + raise NotImplementedError + class KeyFile(Key): def __init__(self, path: str): self._path = os.path.realpath(path) self._public_key = None + self._fingerprint = None def public_key_file(self) -> typing.Optional[str]: return self._path @@ -65,6 +69,21 @@ def public_key(self) -> str: self._public_key = f.read().strip() return self._public_key + def fingerprint(self) -> str: + if self._fingerprint: + return self._fingerprint + else: + result = subprocess.run( + ['ssh-keygen', '-l', '-f', self.public_key_file()], + stdin=subprocess.DEVNULL, + stdout=subprocess.PIPE, + ) + if result.returncode == 0: + self._fingerprint = result.stdout.decode('utf-8', 'backslashreplace').strip() + return self._fingerprint + else: + raise ValueError(f'Failed to generate fingerprint: {result.returncode}.') + def __eq__(self, other): return isinstance(other, KeyFile) and self._path == other._path and self.public_key() == other.public_key() @@ -75,10 +94,27 @@ def __repr__(self): class InMemoryPublicKey(Key): def __init__(self, content: str): self.content = content.strip() + self._fingerprint = None def public_key(self) -> str: return self.content + def fingerprint(self) -> str: + if self._fingerprint: + return self._fingerprint + else: + child = subprocess.Popen( + ['ssh-keygen', '-l', '-f', '/dev/stdin'], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + ) + stdout, _ = child.communicate(self.public_key().encode('utf-8')) + if child.returncode == 0: + self._fingerprint = stdout.decode('utf-8', 'backslashreplace').strip() + return self._fingerprint + else: + raise ValueError(f'Failed to generate fingerprint: {child.returncode}.') + def __eq__(self, other): return isinstance(other, 
InMemoryPublicKey) and self.content == other.content @@ -168,7 +204,9 @@ def __init__(self, socket_path: typing.Optional[str] = None): self.socket_path = None else: self.socket_path = socket_path - self.shell = sh.LocalShell(additional_environ={'SSH_AUTH_SOCK': self.socket_path} if self.socket_path else None) + self.shell = sh.LocalShell( + additional_environ={'SSH_AUTH_SOCK': self.socket_path} if self.socket_path is not None else None, + ) def list(self) -> typing.List[Key]: cmd = 'ssh-add -L' @@ -274,7 +312,7 @@ def fetch_public_key_file_list( host: typing.Optional[str], user: str, generate_key_pair: bool = False -) -> typing.List[str]: +) -> typing.List[KeyFile]: """ Fetch the public key file list for the specified user on the specified host. @@ -294,7 +332,7 @@ def fetch_public_key_file_list( if not public_keys: host_str = f'@{host}' if host else ' locally' raise Error(f'No public key file found for {user}{host_str}') - return public_keys + return [KeyFile(p) for p in public_keys] def fetch_public_key_content_list( diff --git a/crmsh/ui_cluster.py b/crmsh/ui_cluster.py index 435a177415..ee6103dbae 100644 --- a/crmsh/ui_cluster.py +++ b/crmsh/ui_cluster.py @@ -410,8 +410,8 @@ def do_init(self, context, *args): help="Use the given watchdog device or driver name") parser.add_argument("-x", "--skip-csync2-sync", dest="skip_csync2", action="store_true", help="Skip csync2 initialization (an experimental option)") - parser.add_argument('--use-ssh-agent', action='store_true', dest='use_ssh_agent', - help="Use an existing key from ssh-agent instead of creating new key pairs") + parser.add_argument('--use-ssh-agent', action=argparse.BooleanOptionalAction, dest='use_ssh_agent', default=True, + help="Try to use an existing key from ssh-agent (default)") network_group = parser.add_argument_group("Network configuration", "Options for configuring the network and messaging layer.") network_group.add_argument("-i", "--interface", dest="nic_addr_list", metavar="IF", 
action=CustomAppendAction, default=[], help=constants.INTERFACE_HELP) @@ -513,8 +513,8 @@ def do_join(self, context, *args): parser.add_argument("-h", "--help", action="store_true", dest="help", help="Show this help message") parser.add_argument("-q", "--quiet", help="Be quiet (don't describe what's happening, just do it)", action="store_true", dest="quiet") parser.add_argument("-y", "--yes", help='Answer "yes" to all prompts (use with caution)', action="store_true", dest="yes_to_all") - parser.add_argument('--use-ssh-agent', action='store_true', dest='use_ssh_agent', - help="Use an existing key from ssh-agent instead of creating new key pairs") + parser.add_argument('--use-ssh-agent', action=argparse.BooleanOptionalAction, dest='use_ssh_agent', default=True, + help="Try to use an existing key from ssh-agent (default)") network_group = parser.add_argument_group("Network configuration", "Options for configuring the network and messaging layer.") network_group.add_argument( @@ -729,8 +729,8 @@ def do_geo_join(self, context, *args): parser.add_argument("-y", "--yes", help='Answer "yes" to all prompts (use with caution)', action="store_true", dest="yes_to_all") parser.add_argument("-c", "--cluster-node", metavar="[USER@]HOST", help="An already-configured geo cluster or arbitrator", dest="cluster_node") parser.add_argument("-s", "--clusters", help="Geo cluster description (see geo-init for details)", dest="clusters", metavar="DESC") - parser.add_argument('--use-ssh-agent', action='store_true', dest='use_ssh_agent', - help="Use an existing key from ssh-agent instead of creating new key pairs") + parser.add_argument('--use-ssh-agent', action=argparse.BooleanOptionalAction, dest='use_ssh_agent', default=True, + help="Try to use an existing key from ssh-agent (default)") options, args = parse_options(parser, args) if options is None or args is None: return @@ -768,8 +768,8 @@ def do_geo_init_arbitrator(self, context, *args): parser.add_argument("-q", "--quiet", help="Be 
quiet (don't describe what's happening, just do it)", action="store_true", dest="quiet") parser.add_argument("-y", "--yes", help='Answer "yes" to all prompts (use with caution)', action="store_true", dest="yes_to_all") parser.add_argument("-c", "--cluster-node", metavar="[USER@]HOST", help="An already-configured geo cluster", dest="cluster_node") - parser.add_argument('--use-ssh-agent', action='store_true', dest='use_ssh_agent', - help="Use an existing key from ssh-agent instead of creating new key pairs") + parser.add_argument('--use-ssh-agent', action=argparse.BooleanOptionalAction, dest='use_ssh_agent', default=True, + help="Try to use an existing key from ssh-agent (default)") options, args = parse_options(parser, args) if options is None or args is None: return diff --git a/crmsh/user_of_host.py b/crmsh/user_of_host.py index 041263f13d..a20d6ae0dd 100644 --- a/crmsh/user_of_host.py +++ b/crmsh/user_of_host.py @@ -51,7 +51,7 @@ def user_pair_for_ssh(self, host: str) -> typing.Tuple[str, str]: local_user = None remote_user = None try: - local_user = 'root' if self.use_ssh_agent() else self.user_of(self.this_node()) + local_user = self.user_of(self.this_node()) remote_user = self.user_of(host) return local_user, remote_user except UserNotFoundError: @@ -71,10 +71,6 @@ def user_pair_for_ssh(self, host: str) -> typing.Tuple[str, str]: else: return cached - @staticmethod - def use_ssh_agent() -> bool: - return config.get_option('core', 'no_generating_ssh_key') - @staticmethod def _get_user_of_host_from_config(host): try: diff --git a/crmsh/utils.py b/crmsh/utils.py index a267e0e443..bdeee6ea1f 100644 --- a/crmsh/utils.py +++ b/crmsh/utils.py @@ -8,7 +8,6 @@ from tempfile import mkstemp import subprocess import re -import glob import time import datetime import shutil @@ -41,8 +40,6 @@ from . import constants from . import options from . import term -from . import ssh_key -from .constants import SSH_OPTION from . 
import log from .prun import prun from .sh import ShellUtils @@ -136,24 +133,6 @@ def user_pair_for_ssh(host): raise ValueError('Can not create ssh session from {} to {}.'.format(this_node(), host)) -def ssh_copy_id_no_raise(local_user, remote_user, remote_node, shell: sh.LocalShell = None): - if shell is None: - shell = sh.LocalShell() - if check_ssh_passwd_need(local_user, remote_user, remote_node, shell): - local_public_key = ssh_key.fetch_public_key_file_list(None, local_user)[0] - logger.info("Configuring SSH passwordless with {}@{}".format(remote_user, remote_node)) - cmd = f"ssh-copy-id -i {local_public_key} '{remote_user}@{remote_node}' &> /dev/null" - result = shell.su_subprocess_run(local_user, cmd, tty=True) - return result.returncode - else: - return 0 - - -def ssh_copy_id(local_user, remote_user, remote_node): - if 0 != ssh_copy_id_no_raise(local_user, remote_user, remote_node): - fatal("Failed to login to remote host {}@{}".format(remote_user, remote_node)) - - @memoize def this_node(): 'returns name of this node (hostname)' @@ -3022,12 +3001,10 @@ class HostUserConfig: """ def __init__(self): self._hosts_users = dict() - self._no_generating_ssh_key = False self.load() def load(self): self._load_hosts_users() - self._load_no_generating_ssh_key() def _load_hosts_users(self): users = list() @@ -3044,13 +3021,9 @@ def _load_hosts_users(self): hosts.append(parts[1]) self._hosts_users = {host: user for user, host in zip(users, hosts)} - def _load_no_generating_ssh_key(self): - self._no_generating_ssh_key = config.get_option('core', 'no_generating_ssh_key') - def save_local(self): value = [f'{user}@{host}' for host, user in sorted(self._hosts_users.items(), key=lambda x: x[0])] config.set_option('core', 'hosts', value) - config.set_option('core', 'no_generating_ssh_key', self._no_generating_ssh_key) debug_on = config.get_option('core', 'debug') if debug_on: config.set_option('core', 'debug', 'false') @@ -3062,13 +3035,9 @@ def save_remote(self, 
remote_hosts: typing.Iterable[str]): self.save_local() value = [f'{user}@{host}' for host, user in sorted(self._hosts_users.items(), key=lambda x: x[0])] crmsh.parallax.parallax_call(remote_hosts, "crm options set core.hosts '{}'".format(', '.join(value))) - crmsh.parallax.parallax_call(remote_hosts, "crm options set core.no_generating_ssh_key '{}'".format( - 'yes' if self._no_generating_ssh_key else 'no' - )) def clear(self): self._hosts_users = dict() - self._no_generating_ssh_key = False def get(self, host): return self._hosts_users[host] @@ -3076,11 +3045,6 @@ def get(self, host): def add(self, user, host): self._hosts_users[host] = user - def set_no_generating_ssh_key(self, value: bool): - self._no_generating_ssh_key = value - - def get_no_generating_ssh_key(self) -> bool: - return self._no_generating_ssh_key def parse_user_at_host(s: str): i = s.find('@') diff --git a/test/features/ssh_agent.feature b/test/features/ssh_agent.feature index 381c09dc1c..d13bf6fc95 100644 --- a/test/features/ssh_agent.feature +++ b/test/features/ssh_agent.feature @@ -4,25 +4,14 @@ Feature: ssh-agent support Test ssh-agent support for crmsh Need nodes: hanode1 hanode2 hanode3 qnetd-node - Scenario: Errors are reported when ssh-agent is not avaible - When Try "crm cluster init --use-ssh-agent -y" on "hanode1" - Then Expected "Environment variable SSH_AUTH_SOCK does not exist." in stderr - When Try "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init --use-ssh-agent -y" on "hanode1" - Then Expected "Environment variable SSH_AUTH_SOCK does not exist." 
not in stderr - - Scenario: Errors are reported when there are no keys in ssh-agent + Scenario: Skip creating ssh key pairs when keys are available from ssh-agent Given ssh-agent is started at "/tmp/ssh-auth-sock" on nodes ["hanode1", "hanode2", "hanode3"] - When Try "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init --use-ssh-agent -y" on "hanode1" - Then Expected "ssh-add" in stderr - - Scenario: Skip creating ssh key pairs with --use-ssh-agent - Given Run "mkdir ~/ssh_disabled" OK on "hanode1,hanode2,hanode3" - And Run "mv ~/.ssh/id_* ~/ssh_disabled" OK on "hanode1,hanode2,hanode3" + And Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock ssh-add ~/.ssh/id_rsa" OK on "hanode1,hanode2,hanode3" + And Run "rm -f ~/.ssh/id_*" OK on "hanode1,hanode2,hanode3" And crm.conf poisoned on nodes ["hanode1", "hanode2", "hanode3"] - When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock ssh-add ~/ssh_disabled/id_rsa" on "hanode1,hanode2,hanode3" - And Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init --use-ssh-agent -y" on "hanode1" - And Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster join --use-ssh-agent -y -c hanode1" on "hanode2" - And Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster join --use-ssh-agent -y -c hanode1" on "hanode3" + When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init -y" on "hanode1" + And Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster join -y -c hanode1" on "hanode2" + And Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster join -y -c hanode1" on "hanode3" Then Cluster service is "started" on "hanode1" And Online nodes are "hanode1 hanode2 hanode3" # check the number of keys in authorized_keys @@ -30,20 +19,9 @@ Feature: ssh-agent support And Run "test x3 == x$(sudo awk 'END {print NR}' ~hacluster/.ssh/authorized_keys)" OK And Run "grep -E 'hosts = (root|alice)@hanode1' /root/.config/crm/crm.conf" OK on "hanode1,hanode2,hanode3" - # This test is not applicable for non-root user, since the root ssh key pair exists - @skip_non_root - Scenario: Verify expected 
error message when SSH_AUTH_SOCK is not set - When Try "crm cluster remove hanode3 -y" on "hanode1" - Then Expected "Environment variable SSH_AUTH_SOCK does not exist" in stderr - - Scenario: Give a warning when detected SSH_AUTH_SOCK but not using --use-ssh-agent - Given Run "crm cluster stop" OK on "hanode1,hanode2,hanode3" - When Try "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init -y" on "hanode1" - Then Expected "$SSH_AUTH_SOCK is detected. As a tip, using the --use-ssh-agent option could avoid generate local root ssh keys on cluster nodes" in stderr - - Scenario: Skip creating ssh key pairs with --use-ssh-agent and use -N + Scenario: Skip creating ssh key pairs when using -N and keys are available from ssh-agent Given Run "crm cluster stop" OK on "hanode1,hanode2,hanode3" - When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init --use-ssh-agent -y -N hanode2 -N hanode3" on "hanode1" + When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init -y -N hanode2 -N hanode3" on "hanode1" Then Cluster service is "started" on "hanode3" And Online nodes are "hanode1 hanode2 hanode3" And Run "test x1 == x$(awk 'END {print NR}' ~/.ssh/authorized_keys)" OK on "hanode3" @@ -56,9 +34,9 @@ Feature: ssh-agent support Scenario: Use qnetd Given Run "crm cluster stop" OK on "hanode1,hanode2,hanode3" - When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init --use-ssh-agent -y" on "hanode1" - And Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init qdevice --use-ssh-agent -y --qnetd-hostname qnetd-node" on "hanode1" - And Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster join --use-ssh-agent -y -c hanode1" on "hanode2" + When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init -y" on "hanode1" + And Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init qdevice -y --qnetd-hostname qnetd-node" on "hanode1" + And Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster join -y -c hanode1" on "hanode2" Then Cluster service is "started" on "hanode1" And Online nodes are 
"hanode1 hanode2" And Service "corosync-qdevice" is "started" on "hanode1" @@ -67,7 +45,7 @@ Feature: ssh-agent support Scenario: Use qnetd with -N Given Run "crm cluster stop" OK on "hanode1,hanode2" - When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init --use-ssh-agent -y -N hanode2 --qnetd-hostname qnetd-node" on "hanode1" + When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init -y -N hanode2 --qnetd-hostname qnetd-node" on "hanode1" Then Cluster service is "started" on "hanode1" And Online nodes are "hanode1 hanode2" And Service "corosync-qdevice" is "started" on "hanode1" @@ -80,19 +58,19 @@ Feature: ssh-agent support And Cluster service is "stopped" on "hanode1" And Cluster service is "stopped" on "hanode2" And crm.conf poisoned on nodes ["hanode1", "hanode2", "hanode3"] - When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init -y -n cluster1 --use-ssh-agent" on "hanode1" + When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init -y -n cluster1" on "hanode1" Then Cluster service is "started" on "hanode1" When Run "crm configure primitive vip IPaddr2 params ip=@vip.0" on "hanode1" - When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init -y -n cluster2 --use-ssh-agent" on "hanode2" + When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster init -y -n cluster2" on "hanode2" Then Cluster service is "started" on "hanode2" When Run "crm configure primitive vip IPaddr2 params ip=@vip.1" on "hanode2" When Run "crm cluster geo_init -y --clusters "cluster1=@vip.0 cluster2=@vip.1" --tickets tickets-geo --arbitrator hanode3" on "hanode1" - When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster geo_join -y --use-ssh-agent --cluster-node hanode1 --clusters "cluster1=@vip.0 cluster2=@vip.1"" on "hanode2" + When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster geo_join -y --cluster-node hanode1 --clusters "cluster1=@vip.0 cluster2=@vip.1"" on "hanode2" Given Service "booth@booth" is "stopped" on "hanode3" - When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock 
crm cluster geo_init_arbitrator -y --use-ssh-agent --cluster-node hanode1" on "hanode3" + When Run "SSH_AUTH_SOCK=/tmp/ssh-auth-sock crm cluster geo_init_arbitrator -y --cluster-node hanode1" on "hanode3" Then Service "booth@booth" is "started" on "hanode3" When Run "crm resource start g-booth" on "hanode1" Then Show cluster status on "hanode1" diff --git a/test/features/steps/const.py b/test/features/steps/const.py index 6e34c6f492..4846f611fb 100644 --- a/test/features/steps/const.py +++ b/test/features/steps/const.py @@ -79,8 +79,8 @@ Use the given watchdog device or driver name -x, --skip-csync2-sync Skip csync2 initialization (an experimental option) - --use-ssh-agent Use an existing key from ssh-agent instead of creating - new key pairs + --use-ssh-agent, --no-use-ssh-agent + Try to use an existing key from ssh-agent (default) Network configuration: Options for configuring the network and messaging layer. @@ -231,8 +231,8 @@ -h, --help Show this help message -q, --quiet Be quiet (don't describe what's happening, just do it) -y, --yes Answer "yes" to all prompts (use with caution) - --use-ssh-agent Use an existing key from ssh-agent instead of creating - new key pairs + --use-ssh-agent, --no-use-ssh-agent + Try to use an existing key from ssh-agent (default) Network configuration: Options for configuring the network and messaging layer. 
@@ -341,8 +341,8 @@ -c, --cluster-node [USER@]HOST An already-configured geo cluster or arbitrator -s, --clusters DESC Geo cluster description (see geo-init for details) - --use-ssh-agent Use an existing key from ssh-agent instead of creating - new key pairs''' + --use-ssh-agent, --no-use-ssh-agent + Try to use an existing key from ssh-agent (default)''' CRM_CLUSTER_GEO_INIT_ARBIT_H_OUTPUT = '''Initialize node as geo cluster arbitrator @@ -359,8 +359,8 @@ -y, --yes Answer "yes" to all prompts (use with caution) -c, --cluster-node [USER@]HOST An already-configured geo cluster - --use-ssh-agent Use an existing key from ssh-agent instead of creating - new key pairs''' + --use-ssh-agent, --no-use-ssh-agent + Try to use an existing key from ssh-agent (default)''' CRM_CONF_CONTENT_POSIONED = '''[core] hosts = alan@hanode1, claude@hanode2, john@hanode3''' diff --git a/test/run-functional-tests b/test/run-functional-tests index 58bfe82a7a..1cae26fc84 100755 --- a/test/run-functional-tests +++ b/test/run-functional-tests @@ -372,7 +372,7 @@ run_origin_regression_test() { prepare_coverage_env() { for node in $*; do - podman cp "$PROJECT_PATH"/test/features/coverage/sitecustomize.py "$node":/usr/lib/python3.11/site-packages/sitecustomize.py + podman cp "$PROJECT_PATH"/test/features/coverage/sitecustomize.py "$node":/usr/lib/python3.13/site-packages/sitecustomize.py done } diff --git a/test/unittests/test_bootstrap.py b/test/unittests/test_bootstrap.py index 04fbac3a22..c0efa4a3dd 100644 --- a/test/unittests/test_bootstrap.py +++ b/test/unittests/test_bootstrap.py @@ -26,7 +26,7 @@ except ImportError: import mock -from crmsh import bootstrap +from crmsh import bootstrap, cibquery from crmsh import constants from crmsh import qdevice from crmsh import sbd @@ -200,7 +200,7 @@ def test_validate_cluster_node_same_name(self, mock_ip_in_local, mock_gethost, m mock_ip_in_local.return_value = True with self.assertRaises(SystemExit): ctx._validate_cluster_node() - 
mock_fatal.assert_called_once_with("Please specify peer node's hostname or IP address") + mock_fatal.assert_called_once_with("\"me\" is the local node. Please specify peer node's hostname or IP address") @mock.patch('crmsh.utils.fatal') @mock.patch('socket.gethostbyname') @@ -524,15 +524,16 @@ def test_change_user_shell(self, mock_nologin, mock_invoke): mock_nologin.assert_called_once_with("hacluster", None) mock_invoke.assert_called_once_with("usermod -s /bin/bash hacluster", None) - @mock.patch('crmsh.sh.LocalShell.su_subprocess_run') - def test_generate_ssh_key_pair_on_remote(self, mock_su: mock.MagicMock): - mock_su.return_value = mock.Mock(returncode=0, stdout=b'') - bootstrap.generate_ssh_key_pair_on_remote('local_sudoer', 'remote_host', 'remote_sudoer', 'remote_user') - mock_su.assert_has_calls([ + def test_generate_ssh_key_pair_on_remote(self): + mock_shell = mock.Mock(crmsh.sh.LocalShell) + mock_shell.su_subprocess_run.return_value = mock.Mock(returncode=0, stdout=b'') + bootstrap.generate_ssh_key_pair_on_remote(mock_shell, 'local_sudoer', 'remote_host', 'remote_sudoer', 'remote_user') + mock_shell.su_subprocess_run.assert_has_calls([ mock.call( 'local_sudoer', 'ssh -o StrictHostKeyChecking=no remote_sudoer@remote_host sudo -H -u remote_user /bin/sh', input=f''' +set -e key_types=({ ' '.join(crmsh.ssh_key.KeyFileManager.KNOWN_KEY_TYPES) }) for key_type in "${{key_types[@]}}"; do priv_key_file=~/.ssh/id_${{key_type}} @@ -596,7 +597,7 @@ def _test_configure_ssh_key(self, mock_change_shell, mock_key_files, mock_detect @mock.patch('crmsh.ssh_key.KeyFileManager.ensure_key_pair_exists_for_user') @mock.patch('crmsh.bootstrap.change_user_shell') def test_configure_ssh_key(self, mock_change_user_shell, mock_ensure_key_pair, mock_add): - public_key = crmsh.ssh_key.InMemoryPublicKey('foo') + public_key = crmsh.ssh_key.InMemoryPublicKey('ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJh4fv0ULZFXo9nWS/Li8g/t1yFqXjFEFECPe/O7KfPQ') mock_ensure_key_pair.return_value = (True, 
[public_key]) bootstrap.configure_ssh_key('alice') mock_change_user_shell.assert_called_once_with('alice') @@ -610,54 +611,115 @@ def test_join_ssh_no_seed_host(self, mock_error): bootstrap.join_ssh(None, None) mock_error.assert_called_once_with("No existing IP/hostname specified (use -c option)") - @mock.patch('crmsh.bootstrap.detect_cluster_service_on_node') - @mock.patch('crmsh.bootstrap.get_node_canonical_hostname') + + @mock.patch('crmsh.bootstrap.setup_passwordless_with_other_nodes') @mock.patch('crmsh.bootstrap.swap_public_ssh_key_for_secondary_user') + @mock.patch('crmsh.sh.cluster_shell') @mock.patch('crmsh.bootstrap.change_user_shell') - @mock.patch('crmsh.sh.SSHShell') - @mock.patch('crmsh.bootstrap.swap_public_ssh_key') - @mock.patch('crmsh.utils.ssh_copy_id_no_raise') @mock.patch('crmsh.bootstrap.configure_ssh_key') + @mock.patch('crmsh.bootstrap.get_node_canonical_hostname') + @mock.patch('crmsh.bootstrap.detect_cluster_service_on_node') + @mock.patch('crmsh.utils.HostUserConfig') + @mock.patch('crmsh.ssh_key.AuthorizedKeyManager') + @mock.patch('crmsh.sh.SSHShell') + @mock.patch('crmsh.bootstrap.ssh_copy_id_no_raise') @mock.patch('crmsh.sh.LocalShell') - @mock.patch('crmsh.service_manager.ServiceManager.start_service') + @mock.patch('os.environ') + @mock.patch('crmsh.service_manager.ServiceManager') def test_join_ssh( self, - mock_start_service, mock_local_shell, mock_config_ssh, mock_ssh_copy_id, mock_swap, + mock_service_manager, + mock_environ, + mock_local_shell, + mock_ssh_copy_id_no_raise, mock_ssh_shell, - mock_change, mock_swap_2, - mock_get_node_cononical_hostname, - mock_detect_cluster_service_on_node + mock_authorized_key_manager, + mock_host_user_config, + mock_detect_cluster_service_on_node, + mock_get_node_canonical_hostname, + mock_configure_ssh_key, + mock_change_user_shell, + mock_cluster_shell_fn, + mock_swap_public_ssh_key_for_secondary_user, + mock_setup_passwordless_with_other_nodes, ): - bootstrap._context = 
mock.Mock(current_user="bob", default_nic="eth1", use_ssh_agent=False, stage=None) - mock_swap.return_value = None - mock_ssh_copy_id.return_value = 0 - mock_subprocess_run_without_input = mock_ssh_shell.return_value.subprocess_run_without_input - mock_subprocess_run_without_input.return_value = mock.Mock(returncode=0) - mock_get_node_cononical_hostname.return_value='node1' - - bootstrap.join_ssh("node1", "alice") - - mock_start_service.assert_called_once_with("sshd.service", enable=True) - mock_local_shell: mock.MagicMock - mock_local_shell.assert_has_calls([ - mock.call(additional_environ={'SSH_AUTH_SOCK': ''}), - ]) - mock_config_ssh.assert_has_calls([ - mock.call("bob"), - mock.call("hacluster"), - ]) - mock_ssh_copy_id.assert_called_once_with("bob", "alice", "node1", mock_local_shell.return_value) - mock_subprocess_run_without_input.assert_called_once_with( - 'node1', 'alice', 'sudo true', + ssh_key = mock.Mock(crmsh.ssh_key.InMemoryPublicKey) + ssh_key.fingerprint.return_value = 'foo' + mock_environ.get.return_value = '/nonexist' + mock_ssh_copy_id_no_raise.return_value = crmsh.bootstrap.SshCopyIdResult( + 0, [ssh_key], + ) + mock_ssh_shell.return_value.subprocess_run_without_input.return_value = mock.Mock(returncode=0) + mock_get_node_canonical_hostname.return_value = 'host1' + crmsh.bootstrap.join_ssh_impl('alice', 'node1', 'bob', [ssh_key]) + mock_environ.get.assert_called_with('SSH_AUTH_SOCK') + mock_local_shell.assert_called_with(additional_environ={'SSH_AUTH_SOCK': '/nonexist'}) + mock_ssh_copy_id_no_raise.assert_called_once_with('alice', 'bob', 'node1', mock_local_shell.return_value) + mock_ssh_shell.assert_called_once_with(mock_local_shell.return_value, 'alice') + mock_authorized_key_manager.assert_called_once_with(mock_ssh_shell.return_value) + mock_authorized_key_manager.return_value.add.assert_called_once_with(None, 'alice', ssh_key) + mock_ssh_shell.return_value.subprocess_run_without_input.assert_called_once_with( + 'node1', 'bob', 'sudo true', 
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, ) - mock_swap.assert_called_once_with("node1", "bob", "alice", "bob", "alice") - mock_swap_2.assert_called_once() - args, kwargs = mock_swap_2.call_args - self.assertEqual(3, len(args)) - self.assertEqual('node1', args[1]) - self.assertEqual('hacluster', args[2]) + mock_host_user_config.return_value.add.assert_called_with('bob', 'host1') + mock_configure_ssh_key.assert_called_once_with('hacluster') + mock_change_user_shell.assert_called_once_with('hacluster') + mock_swap_public_ssh_key_for_secondary_user.assert_called_once_with( + mock_cluster_shell_fn.return_value, 'node1', 'hacluster', + ) + + + @mock.patch('crmsh.bootstrap.setup_passwordless_with_other_nodes') + @mock.patch('crmsh.bootstrap.swap_public_ssh_key_for_secondary_user') + @mock.patch('crmsh.sh.cluster_shell') + @mock.patch('crmsh.bootstrap.change_user_shell') + @mock.patch('crmsh.bootstrap.configure_ssh_key') + @mock.patch('crmsh.bootstrap.get_node_canonical_hostname') + @mock.patch('crmsh.bootstrap.detect_cluster_service_on_node') + @mock.patch('crmsh.utils.HostUserConfig') + @mock.patch('crmsh.ssh_key.AuthorizedKeyManager') + @mock.patch('crmsh.sh.SSHShell') + @mock.patch('crmsh.bootstrap.ssh_copy_id_no_raise') + @mock.patch('crmsh.sh.LocalShell') + @mock.patch('os.environ') + @mock.patch('crmsh.service_manager.ServiceManager') + def test_join_ssh_bad_credential( + self, + mock_service_manager, + mock_environ, + mock_local_shell, + mock_ssh_copy_id_no_raise, + mock_ssh_shell, + mock_authorized_key_manager, + mock_host_user_config, + mock_detect_cluster_service_on_node, + mock_get_node_canonical_hostname, + mock_configure_ssh_key, + mock_change_user_shell, + mock_cluster_shell_fn, + mock_swap_public_ssh_key_for_secondary_user, + mock_setup_passwordless_with_other_nodes, + ): + ssh_key = mock.Mock(crmsh.ssh_key.InMemoryPublicKey) + ssh_key.fingerprint.return_value = 'foo' + mock_environ.get.side_effect = ['/nonexist', 'alice'] + 
mock_ssh_copy_id_no_raise.return_value = crmsh.bootstrap.SshCopyIdResult( + 255, list(), + ) + with self.assertRaises(ValueError): + crmsh.bootstrap.join_ssh_impl('alice', 'node1', 'bob', [ssh_key]) + mock_environ.get.assert_called_with('SUDO_USER') + mock_local_shell.assert_called_with(additional_environ={'SSH_AUTH_SOCK': '/nonexist'}) + mock_ssh_copy_id_no_raise.assert_called_once_with('alice', 'bob', 'node1', mock_local_shell.return_value) + mock_ssh_shell.assert_not_called() + mock_authorized_key_manager.assert_not_called() + mock_host_user_config.return_value.add.assert_not_called() + mock_configure_ssh_key.assert_not_called() + mock_change_user_shell.assert_not_called() + mock_swap_public_ssh_key_for_secondary_user.assert_not_called() + @mock.patch('crmsh.ssh_key.AuthorizedKeyManager.add') @mock.patch('crmsh.ssh_key.KeyFile.public_key') @@ -692,33 +754,6 @@ def test_swap_public_ssh_key_for_secondary_user( ]) mock_log_info.assert_called_with("A new ssh keypair is generated for user %s@%s.", 'alice', 'node1') - @mock.patch('crmsh.bootstrap.change_user_shell') - @mock.patch('crmsh.sh.LocalShell.get_stdout_or_raise_error') - @mock.patch('crmsh.bootstrap.swap_public_ssh_key') - @mock.patch('crmsh.utils.ssh_copy_id_no_raise') - @mock.patch('crmsh.bootstrap.configure_ssh_key') - @mock.patch('crmsh.sh.LocalShell') - @mock.patch('crmsh.service_manager.ServiceManager.start_service') - def test_join_ssh_bad_credential(self, mock_start_service, mock_local_shell, mock_config_ssh, mock_ssh_copy_id, mock_swap, mock_invoke, mock_change): - bootstrap._context = mock.Mock(current_user="bob", default_nic="eth1", use_ssh_agent=False) - mock_invoke.return_value = '' - mock_swap.return_value = None - mock_ssh_copy_id.return_value = 255 - - with self.assertRaises(ValueError): - bootstrap.join_ssh("node1", "alice") - - mock_start_service.assert_called_once_with("sshd.service", enable=True) - mock_local_shell.assert_has_calls([ - mock.call(additional_environ={'SSH_AUTH_SOCK': ''}), - 
]) - mock_config_ssh.assert_has_calls([ - mock.call("bob"), - ]) - mock_ssh_copy_id.assert_called_once_with("bob", "alice", "node1", mock_local_shell.return_value) - mock_swap.assert_not_called() - mock_invoke.assert_not_called() - @mock.patch('crmsh.utils.this_node') def test_bootstrap_add_return(self, mock_this_node): ctx = mock.Mock(user_at_node_list=[], use_ssh_agent=False) @@ -730,7 +765,10 @@ def test_bootstrap_add_return(self, mock_this_node): @mock.patch('crmsh.utils.this_node') def test_bootstrap_add(self, mock_this_node, mock_info, mock_run): mock_interfaces_inst = mock.Mock(input_nic_list=["eth1", "eth2"]) - ctx = mock.Mock(current_user="alice", user_at_node_list=["bob@node2", "carol@node3"], nic_list=["eth1"], use_ssh_agent=False, interfaces_inst=mock_interfaces_inst) + ctx = mock.Mock( + current_user="alice", user_at_node_list=["bob@node2", "carol@node3"], nic_list=["eth1"], + use_ssh_agent=True, interfaces_inst=mock_interfaces_inst, + ) mock_this_node.return_value = "node1" bootstrap.bootstrap_add(ctx) mock_info.assert_has_calls([ @@ -740,92 +778,185 @@ def test_bootstrap_add(self, mock_this_node, mock_info, mock_run): mock.call("Running command on node3: crm cluster join -y -i eth1 -i eth2 -c alice@node1") ]) - @mock.patch('crmsh.utils.fatal') - @mock.patch('crmsh.sh.ClusterShell.get_rc_stdout_stderr_without_input') - def test_setup_passwordless_with_other_nodes_failed_fetch_nodelist(self, mock_run, mock_error): - bootstrap._context = mock.Mock(current_user="carol", use_ssh_agent=False) - mock_run.return_value = (1, None, None) - mock_error.side_effect = SystemExit - - with self.assertRaises(SystemExit): - bootstrap.setup_passwordless_with_other_nodes("node1", "alice") - - mock_run.assert_called_once_with('node1', 'crm_node -l') - mock_error.assert_called_once_with("Can't fetch cluster nodes list from node1: None") + @mock.patch('crmsh.sh.ClusterShell.get_stdout_or_raise_error') + @mock.patch('logging.Logger.info') + 
@mock.patch('crmsh.utils.this_node') + def test_bootstrap_add_no_ssh_agent(self, mock_this_node, mock_info, mock_run): + mock_interfaces_inst = mock.Mock(input_nic_list=["eth1", "eth2"]) + ctx = mock.Mock( + current_user="alice", user_at_node_list=["bob@node2", "carol@node3"], nic_list=["eth1"], + use_ssh_agent=False, interfaces_inst=mock_interfaces_inst, + ) + mock_this_node.return_value = "node1" + bootstrap.bootstrap_add(ctx) + mock_info.assert_has_calls([ + mock.call("Adding node node2 to cluster"), + mock.call("Running command on node2: crm cluster join -y -i eth1 -i eth2 --no-use-ssh-agent -c alice@node1"), + mock.call("Adding node node3 to cluster"), + mock.call("Running command on node3: crm cluster join -y -i eth1 -i eth2 --no-use-ssh-agent -c alice@node1") + ]) - @mock.patch('crmsh.utils.fatal') + @mock.patch('crmsh.bootstrap.swap_key_for_hacluster') + @mock.patch('crmsh.bootstrap.swap_public_ssh_key') + @mock.patch('crmsh.bootstrap.change_user_shell') + @mock.patch('crmsh.bootstrap._merge_ssh_authorized_keys') + @mock.patch('crmsh.bootstrap.ssh_copy_id_no_raise') + @mock.patch('crmsh.utils.user_of') + @mock.patch('crmsh.bootstrap._fetch_core_hosts') + @mock.patch('crmsh.utils.this_node') @mock.patch('crmsh.utils.HostUserConfig') + @mock.patch('crmsh.cibquery.get_cluster_nodes') + @mock.patch('crmsh.user_of_host.UserOfHost') + @mock.patch('lxml.etree.fromstring') + @mock.patch('crmsh.sh.ClusterShell') + @mock.patch('crmsh.sh.LocalShell') + @mock.patch('os.environ') + @mock.patch('crmsh.bootstrap._context', current_user='carol', use_ssh_agent=True) + def test_setup_passwordless_with_other_nodes_failed_fetch_node_list( + self, + mock_context, + mock_env, + mock_local_shell, + mock_cluster_shell, + mock_lxml_etree_fromstring, + mock_user_of_host, + mock_get_cluster_nodes, + mock_host_user_config, + mock_this_node, + mock_fetch_core_hosts, + mock_user_of, + mock_ssh_copy_id, + mock_merge_ssh_authorized_keys, + mock_change_user_shell, + 
mock_swap_public_ssh_key, + mock_swap_key_for_hacluster, + ): + mock_env.get.return_value = '' + mock_cluster_shell.return_value.get_rc_stdout_stderr_without_input.return_value = (255, '', 'foo') + with self.assertRaises(ValueError) as e: + bootstrap.setup_passwordless_with_other_nodes('node1') + self.assertEqual("Can't fetch cluster nodes list from node1: foo", str(e.exception)) + mock_local_shell.assert_called_once_with(additional_environ={'SSH_AUTH_SOCK': ''}) + mock_cluster_shell.assert_called_once_with(mock_local_shell.return_value, mock_user_of_host.instance.return_value, True, True) + mock_get_cluster_nodes.assert_not_called() + + @mock.patch('crmsh.bootstrap.swap_key_for_hacluster') + @mock.patch('crmsh.bootstrap.swap_public_ssh_key') + @mock.patch('crmsh.bootstrap.change_user_shell') + @mock.patch('crmsh.bootstrap._merge_ssh_authorized_keys') + @mock.patch('crmsh.bootstrap.ssh_copy_id_no_raise') + @mock.patch('crmsh.utils.user_of') @mock.patch('crmsh.bootstrap._fetch_core_hosts') - @mock.patch('crmsh.sh.ClusterShell.get_rc_stdout_stderr_without_input') + @mock.patch('crmsh.utils.this_node') + @mock.patch('crmsh.utils.HostUserConfig') + @mock.patch('crmsh.cibquery.get_cluster_nodes') + @mock.patch('crmsh.user_of_host.UserOfHost') + @mock.patch('lxml.etree.fromstring') + @mock.patch('crmsh.sh.ClusterShell') + @mock.patch('crmsh.sh.LocalShell') + @mock.patch('os.environ') + @mock.patch('crmsh.bootstrap._context', current_user='carol', use_ssh_agent=True) def test_setup_passwordless_with_other_nodes_failed_fetch_hostname( self, - mock_run, + mock_context, + mock_env, + mock_local_shell, + mock_cluster_shell, + mock_lxml_etree_fromstring, + mock_user_of_host, + mock_get_cluster_nodes, + mock_host_user_config, + mock_this_node, mock_fetch_core_hosts, - mock_host_user_config_class, - mock_error, + mock_user_of, + mock_ssh_copy_id, + mock_merge_ssh_authorized_keys, + mock_change_user_shell, + mock_swap_public_ssh_key, + mock_swap_key_for_hacluster, ): - 
bootstrap._context = mock.Mock(current_user="carol", use_ssh_agent=False) - out_node_list = """1 node1 member - 2 node2 member""" - mock_run.side_effect = [ - (0, out_node_list, None), - (1, None, None) - ] - mock_fetch_core_hosts.return_value = (["alice", "bob"], ["node1", "node2"]) - mock_error.side_effect = SystemExit - - with self.assertRaises(SystemExit): - bootstrap.setup_passwordless_with_other_nodes("node1", "alice") - - mock_run.assert_has_calls([ - mock.call('node1', 'crm_node -l'), - mock.call('node1', 'hostname'), + # conditions + mock_env.get.return_value = '' + mock_cluster_shell.return_value.get_rc_stdout_stderr_without_input.side_effect = [ + (0, '', ''), + (1, '', 'foo'), + ] + mock_get_cluster_nodes.return_value = [cibquery.ClusterNode(1, 'node1'), cibquery.ClusterNode(2, 'node2')] + mock_this_node.return_value = 'node3' + mock_fetch_core_hosts.return_value = (['alice', 'bob'], ['node1', 'node2']) + with self.assertRaises(ValueError) as e: + bootstrap.setup_passwordless_with_other_nodes('node1') + self.assertEqual("Can't fetch hostname of node1: foo", str(e.exception)) + # assertions + mock_local_shell.assert_called_once_with(additional_environ={'SSH_AUTH_SOCK': ''}) + mock_cluster_shell.assert_called_once_with(mock_local_shell.return_value, mock_user_of_host.instance.return_value, True, True) + mock_host_user_config.return_value.add.assert_has_calls([ + mock.call('carol', 'node3'), + mock.call('alice', 'node1'), + mock.call('bob', 'node2'), ]) - mock_error.assert_called_once_with("Can't fetch hostname of node1: None") + mock_host_user_config.return_value.save_local.assert_called_once_with() + mock_ssh_copy_id.assert_not_called() @mock.patch('crmsh.bootstrap.swap_key_for_hacluster') + @mock.patch('crmsh.bootstrap.swap_public_ssh_key') @mock.patch('crmsh.bootstrap.change_user_shell') - @mock.patch('crmsh.utils.HostUserConfig') - @mock.patch('crmsh.bootstrap._fetch_core_hosts') - @mock.patch('crmsh.utils.ssh_copy_id') + 
@mock.patch('crmsh.bootstrap._merge_ssh_authorized_keys') + @mock.patch('crmsh.bootstrap.ssh_copy_id_no_raise') @mock.patch('crmsh.utils.user_of') - @mock.patch('crmsh.bootstrap.swap_public_ssh_key') - @mock.patch('crmsh.sh.ClusterShell.get_rc_stdout_stderr_without_input') + @mock.patch('crmsh.bootstrap._fetch_core_hosts') + @mock.patch('crmsh.utils.this_node') + @mock.patch('crmsh.utils.HostUserConfig') + @mock.patch('crmsh.cibquery.get_cluster_nodes') + @mock.patch('crmsh.user_of_host.UserOfHost') + @mock.patch('lxml.etree.fromstring') + @mock.patch('crmsh.sh.ClusterShell') + @mock.patch('crmsh.sh.LocalShell') + @mock.patch('os.environ') + @mock.patch('crmsh.bootstrap._context', current_user='carol', use_ssh_agent=True) def test_setup_passwordless_with_other_nodes( self, - mock_run, - mock_swap, - mock_userof, - mock_ssh_copy_id: mock.MagicMock, + mock_context, + mock_env, + mock_local_shell, + mock_cluster_shell, + mock_lxml_etree_fromstring, + mock_user_of_host, + mock_get_cluster_nodes, + mock_host_user_config, + mock_this_node, mock_fetch_core_hosts, - mock_host_user_config_class, - mock_change_shell, - mock_swap_hacluster + mock_user_of, + mock_ssh_copy_id, + mock_merge_ssh_authorized_keys, + mock_change_user_shell, + mock_swap_public_ssh_key, + mock_swap_key_for_hacluster, ): - bootstrap._context = mock.Mock(current_user="carol", use_ssh_agent=False) - mock_fetch_core_hosts.return_value = (["alice", "bob"], ["node1", "node2"]) - mock_userof.return_value = "bob" - out_node_list = """1 node1 member - 2 node2 member""" - mock_run.side_effect = [ - (0, out_node_list, None), - (0, "node1", None) - ] - - bootstrap.setup_passwordless_with_other_nodes("node1", "alice") - - mock_run.assert_has_calls([ - mock.call('node1', 'crm_node -l'), - mock.call('node1', 'hostname'), - ]) - mock_userof.assert_called_once_with("node2") - mock_ssh_copy_id.assert_has_calls([ - mock.call('carol', 'bob', 'node2') + # conditions + mock_env.get.return_value = '' + 
mock_cluster_shell.return_value.get_rc_stdout_stderr_without_input.return_value = (0, 'node1', '') + mock_get_cluster_nodes.return_value = [cibquery.ClusterNode(1, 'node1'), cibquery.ClusterNode(2, 'node2')] + mock_this_node.return_value = 'node3' + mock_fetch_core_hosts.return_value = (['alice', 'bob'], ['node1', 'node2']) + mock_user_of.return_value = 'foo' + mock_ssh_copy_id.return_value = mock.Mock(returncode = 0) + bootstrap.setup_passwordless_with_other_nodes('node1') + # assertions + mock_local_shell.assert_called_once_with(additional_environ={'SSH_AUTH_SOCK': ''}) + mock_cluster_shell.assert_called_once_with(mock_local_shell.return_value, mock_user_of_host.instance.return_value, True, True) + mock_host_user_config.return_value.add.assert_has_calls([ + mock.call('carol', 'node3'), + mock.call('alice', 'node1'), + mock.call('bob', 'node2'), ]) - mock_swap.assert_has_calls([ - mock.call('node2', "carol", "bob", "carol", "bob"), - mock.call('node2', 'hacluster', 'hacluster', 'carol', 'bob') - ]) + mock_host_user_config.return_value.save_local.assert_called_once_with() + mock_ssh_copy_id.assert_called_once_with('carol', 'foo', 'node2', mock_local_shell.return_value) + mock_merge_ssh_authorized_keys.assert_called_once_with(['node1', 'node2']) + mock_change_user_shell.assert_called_once_with('hacluster', 'node2') + mock_swap_public_ssh_key.assert_called_once_with('node2', 'hacluster', 'hacluster', 'carol', 'foo', mock_local_shell.return_value) + mock_swap_key_for_hacluster.assert_called_once_with(['node1', 'node2']) + mock_host_user_config.return_value.save_remote.assert_called_once_with(['node1', 'node2']) @mock.patch('crmsh.sh.ClusterShell.get_rc_stdout_stderr_without_input') def test_get_node_canonical_hostname(self, mock_run): @@ -982,7 +1113,7 @@ def test_init_network_input(self, mock_interface_info): @mock.patch('crmsh.ssh_key.InMemoryPublicKey') @mock.patch('crmsh.ssh_key.fetch_public_key_content_list') @mock.patch('crmsh.utils.this_node') - 
@mock.patch('crmsh.utils.ssh_copy_id_no_raise') + @mock.patch('crmsh.bootstrap.ssh_copy_id_no_raise') @mock.patch('crmsh.utils.check_ssh_passwd_need') @mock.patch('crmsh.bootstrap.UserOfHost.instance') @mock.patch('crmsh.bootstrap._select_user_pair_for_ssh_for_secondary_components') @@ -991,9 +1122,8 @@ def test_setup_passwordless_ssh_for_qnetd_add_keys(self, mock_select, mock_user_ mock_select.side_effect = [("bob", "bob", "qnetd-node"), ("bob", "bob", "node2")] mock_user_of_host_instance = mock.Mock() mock_user_of_host.return_value = mock_user_of_host_instance - mock_user_of_host_instance.use_ssh_agent.return_value = False mock_check_passwd.return_value = True - mock_ssh_copy_id.return_value = 0 + mock_ssh_copy_id.return_value = mock.Mock(returncode=0) mock_this_node.return_value = "node1" mock_remote_public_key_from.return_value = ["public_key"] mock_in_memory_public_key.return_value = "public_key" @@ -1009,42 +1139,6 @@ def test_setup_passwordless_ssh_for_qnetd_add_keys(self, mock_select, mock_user_ mock.call('node2') ]) - @mock.patch('crmsh.utils.this_node') - @mock.patch('crmsh.utils.HostUserConfig') - @mock.patch('os.environ.get') - @mock.patch('crmsh.sh.SSHShell') - @mock.patch('crmsh.sh.LocalShell') - @mock.patch('crmsh.ssh_key.AuthorizedKeyManager') - @mock.patch('crmsh.ssh_key.AgentClient') - @mock.patch('logging.Logger.info') - @mock.patch('crmsh.bootstrap.UserOfHost.instance') - @mock.patch('crmsh.bootstrap._select_user_pair_for_ssh_for_secondary_components') - def test_setup_passwordless_ssh_for_qnetd_ssh_agent(self, mock_select, mock_user_of_host, mock_info, mock_agent, mock_authorized_key_manager, mock_local_shell, mock_ssh_shell, mock_get, mock_host_user_config_class, mock_this_node): - bootstrap._context = mock.Mock(qnetd_addr_input="user@qnetd-node") - mock_select.return_value = ("bob", "bob", "qnetd-node") - mock_user_of_host_instance = mock.Mock() - mock_user_of_host.return_value = mock_user_of_host_instance - 
mock_user_of_host_instance.use_ssh_agent.return_value = True - mock_agent_instance = mock.Mock() - mock_agent.return_value = mock_agent_instance - key_in_memory = mock.MagicMock(crmsh.ssh_key.InMemoryPublicKey) - mock_agent_instance.list.return_value = [key_in_memory] - mock_authorized_key_manager_instance = mock.Mock() - mock_authorized_key_manager.return_value = mock_authorized_key_manager_instance - mock_get.return_value = "/ssh-agent-path" - mock_local_shell_instance = mock.Mock() - mock_local_shell.return_value = mock_local_shell_instance - mock_ssh_shell_instance = mock.Mock() - mock_ssh_shell.return_value = mock_ssh_shell_instance - mock_this_node.return_value = "node1" - mock_host_uesr_config_instance = mock.Mock() - mock_host_user_config_class.return_value = mock_host_uesr_config_instance - - bootstrap._setup_passwordless_ssh_for_qnetd(["node1", "node2"]) - - mock_info.assert_called_once_with("Adding public keys to authorized_keys for user root...") - mock_authorized_key_manager_instance.add.assert_called_once_with("qnetd-node", "bob", key_in_memory) - @mock.patch('crmsh.service_manager.ServiceManager.disable_service') @mock.patch('logging.Logger.info') def test_init_qdevice_no_config(self, mock_status, mock_disable): @@ -1074,7 +1168,6 @@ def test_init_qdevice_already_configured( bootstrap._context = mock.Mock(qdevice_inst=self.qdevice_with_ip, current_user="bob") mock_ssh.return_value = False mock_user_of_host.return_value = mock.MagicMock(crmsh.user_of_host.UserOfHost) - mock_user_of_host.return_value.use_ssh_agent.return_value = False mock_qdevice_configured.return_value = True mock_confirm.return_value = False self.qdevice_with_ip.start_qdevice_service = mock.Mock() @@ -1085,7 +1178,7 @@ def test_init_qdevice_already_configured( mock_status.assert_called_once_with("Configure Qdevice/Qnetd:") mock_local_shell.assert_has_calls([ mock.call(additional_environ={'SSH_AUTH_SOCK': ''}), - mock.call(), + mock.call(additional_environ={'SSH_AUTH_SOCK': ''}), ]) 
mock_ssh.assert_called_once_with("bob", "bob", "qnetd-node", mock_local_shell.return_value) mock_configure_ssh_key.assert_not_called() @@ -1114,7 +1207,6 @@ def test_init_qdevice(self, mock_info, mock_local_shell, mock_ssh, mock_configur mock_list_nodes.return_value = [] mock_ssh.return_value = False mock_user_of_host.return_value = mock.MagicMock(crmsh.user_of_host.UserOfHost) - mock_user_of_host.return_value.use_ssh_agent.return_value = False mock_qdevice_configured.return_value = False self.qdevice_with_ip.set_cluster_name = mock.Mock() self.qdevice_with_ip.valid_qnetd = mock.Mock() @@ -1126,7 +1218,7 @@ def test_init_qdevice(self, mock_info, mock_local_shell, mock_ssh, mock_configur mock_info.assert_called_once_with("Configure Qdevice/Qnetd:") mock_local_shell.assert_has_calls([ mock.call(additional_environ={'SSH_AUTH_SOCK': ''}), - mock.call(), + mock.call(additional_environ={'SSH_AUTH_SOCK': ''}), ]) mock_ssh.assert_called_once_with("bob", "bob", "qnetd-node", mock_local_shell.return_value) mock_host_user_config_class.return_value.add.assert_has_calls([ diff --git a/test/unittests/test_prun.py b/test/unittests/test_prun.py index 7e987bf1d7..a73ac14a49 100644 --- a/test/unittests/test_prun.py +++ b/test/unittests/test_prun.py @@ -40,14 +40,14 @@ def test_prun( ]) mock_runner_add_task.assert_has_calls([ mock.call(TaskArgumentsEq( - ['su', 'alice', '--login', '-c', 'ssh {} bob@host1 sudo -H /bin/sh'.format(crmsh.constants.SSH_OPTION)], + ['su', 'alice', '--login', '-c', 'ssh -A {} bob@host1 sudo -H /bin/sh'.format(crmsh.constants.SSH_OPTION), '-w', 'SSH_AUTH_SOCK'], b'foo', stdout=crmsh.prun.runner.Task.Capture, stderr=crmsh.prun.runner.Task.Capture, context={"host": 'host1', "ssh_user": 'bob'}, )), mock.call(TaskArgumentsEq( - ['su', 'alice', '--login', '-c', 'ssh {} bob@host2 sudo -H /bin/sh'.format(crmsh.constants.SSH_OPTION)], + ['su', 'alice', '--login', '-c', 'ssh -A {} bob@host2 sudo -H /bin/sh'.format(crmsh.constants.SSH_OPTION), '-w', 'SSH_AUTH_SOCK'], 
b'bar', stdout=crmsh.prun.runner.Task.Capture, stderr=crmsh.prun.runner.Task.Capture, @@ -90,14 +90,14 @@ def test_prun_root( ]) mock_runner_add_task.assert_has_calls([ mock.call(TaskArgumentsEq( - ['/bin/sh', '-c', 'ssh {} root@host1 sudo -H /bin/sh'.format(crmsh.constants.SSH_OPTION)], + ['/bin/sh', '-c', 'ssh -A {} root@host1 sudo -H /bin/sh'.format(crmsh.constants.SSH_OPTION)], b'foo', stdout=crmsh.prun.runner.Task.Capture, stderr=crmsh.prun.runner.Task.Capture, context={"host": 'host1', "ssh_user": 'root'}, )), mock.call(TaskArgumentsEq( - ['/bin/sh', '-c', 'ssh {} root@host2 sudo -H /bin/sh'.format(crmsh.constants.SSH_OPTION)], + ['/bin/sh', '-c', 'ssh -A {} root@host2 sudo -H /bin/sh'.format(crmsh.constants.SSH_OPTION)], b'bar', stdout=crmsh.prun.runner.Task.Capture, stderr=crmsh.prun.runner.Task.Capture, @@ -155,3 +155,6 @@ def __eq__(self, other): and self.stdout_config == other.stdout_config \ and self.stderr_config == other.stderr_config \ and self.context == other.context + + def __repr__(self): + return f"TaskArgumentsEq({self.args}, {self.input}, {self.stdout_config}, {self.stderr_config}, {self.context})"