Merge branch '2017.7' into service_unmask
Mike Place committed Dec 6, 2018
2 parents 38addd1 + cb85d5d commit b199255
Showing 4 changed files with 41 additions and 23 deletions.
12 changes: 5 additions & 7 deletions .ci/lint
@@ -60,7 +60,6 @@ pipeline {
read rc_exit < pylint-salt-chg.exit
exit "$rc_exit"
'''
- archiveArtifacts artifacts: 'pylint-report-salt-chg.log'
}
}
stage('lint test chg') {
@@ -77,16 +76,16 @@ pipeline {
read rc_exit < pylint-tests-chg.exit
exit "$rc_exit"
'''
- archiveArtifacts artifacts: 'pylint-report-tests-chg.log'
}
}
}
post {
always {
+ archiveArtifacts artifacts: 'pylint-report-*-chg.log', allowEmptyArchive: true
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
- pattern: 'pylint-report*chg.log'
+ pattern: 'pylint-report-*-chg.log'
]],
failedTotalAll: '0',
useDeltaValues: false,
@@ -99,7 +98,7 @@ pipeline {
stage('linting all') {
// perform a full linit if this is a merge forward and the change only lint passed.
when {
- expression { return params.BRANCH_NAME =~ /(?i)^merge-/ }
+ expression { return params.CHANGE_BRANCH =~ /(?i)^merge[._-]/ }
}
parallel {
stage('setup full') {
@@ -120,7 +119,6 @@ pipeline {
read rc_exit < pylint-salt-full.exit
exit "$rc_exit"
'''
- archiveArtifacts artifacts: 'pylint-report-salt-full.log'
}
}
stage('lint test full') {
@@ -133,16 +131,16 @@ pipeline {
read rc_exit < pylint-tests-full.exit
exit "$rc_exit"
'''
- archiveArtifacts artifacts: 'pylint-report-tests-full.log'
}
}
}
post {
always {
+ archiveArtifacts artifacts: 'pylint-report-*-full.log', allowEmptyArchive: true
step([$class: 'WarningsPublisher',
parserConfigurations: [[
parserName: 'PyLint',
- pattern: 'pylint-report*full.log'
+ pattern: 'pylint-report-*-full.log'
]],
failedTotalAll: '0',
useDeltaValues: false,
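In summary, the pipeline change moves report archiving out of the individual lint stages and into the shared post { always { ... } } block with allowEmptyArchive: true, so the reports are still collected when a stage fails early, anchors the PyLint report globs (pylint-report-*-chg.log / pylint-report-*-full.log), and switches the merge-forward check from params.BRANCH_NAME to params.CHANGE_BRANCH. A small Python sketch, illustrative only and not part of the pipeline, comparing what the old and new globs match:

# Illustrative only: compare the old and new report globs with fnmatch, whose
# shell-style '*' behaves like the single-segment wildcard in the Jenkins patterns.
import fnmatch

candidates = ['pylint-report-salt-chg.log',    # real report name
              'pylint-report-tests-chg.log',   # real report name
              'pylint-reportchg.log']          # hypothetical stray file

for pattern in ('pylint-report*chg.log', 'pylint-report-*-chg.log'):
    matches = [name for name in candidates if fnmatch.fnmatch(name, pattern)]
    print(pattern, '->', matches)
# The old pattern also matches the stray name; the anchored pattern only matches
# the dash-delimited report files the stages actually produce.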
10 changes: 7 additions & 3 deletions salt/client/ssh/ssh_py_shim.py
@@ -292,19 +292,23 @@ def main(argv): # pylint: disable=W0613
if OPTIONS.cmd_umask is not None:
old_umask = os.umask(OPTIONS.cmd_umask) # pylint: disable=blacklisted-function
if OPTIONS.tty:
+ proc = subprocess.Popen(salt_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ # Returns bytes instead of string on python 3
- stdout, _ = subprocess.Popen(salt_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
+ stdout, _ = proc.communicate()
sys.stdout.write(stdout.decode(encoding=get_system_encoding(), errors="replace"))
sys.stdout.flush()
+ retcode = proc.returncode
if OPTIONS.wipe:
shutil.rmtree(OPTIONS.saltdir)
elif OPTIONS.wipe:
- subprocess.call(salt_argv)
+ retcode = subprocess.call(salt_argv)
shutil.rmtree(OPTIONS.saltdir)
else:
- subprocess.call(salt_argv)
+ retcode = subprocess.call(salt_argv)
if OPTIONS.cmd_umask is not None:
os.umask(old_umask) # pylint: disable=blacklisted-function
+ return retcode


if __name__ == '__main__':
sys.exit(main(sys.argv))
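The shim change fixes a lost exit status: in the branches shown, the wrapped salt command's return code was never captured, so main() ended without a return value and sys.exit(main(sys.argv)) reported success even when the command failed. The new code keeps the Popen handle, reads proc.returncode after communicate(), captures the value of subprocess.call() in the non-TTY branches, and returns it. A minimal standalone sketch of the same pattern, not the shim itself:

# Minimal sketch of propagating a child process's exit status (not the shim itself).
import subprocess
import sys


def run(cmd, capture_output=False):
    if capture_output:
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, _ = proc.communicate()          # bytes on Python 3
        sys.stdout.write(stdout.decode('utf-8', errors='replace'))
        sys.stdout.flush()
        return proc.returncode                  # only populated after communicate()/wait()
    # subprocess.call() already returns the child's exit status
    return subprocess.call(cmd)


if __name__ == '__main__':
    # Child exits 3; without returning proc.returncode the wrapper would exit 0.
    sys.exit(run([sys.executable, '-c', 'import sys; sys.exit(3)'], capture_output=True))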
40 changes: 28 additions & 12 deletions salt/netapi/rest_tornado/saltnado.py
@@ -921,14 +921,27 @@ def _disbatch_local(self, chunk):
'''
Dispatch local client commands
'''
- # Generate jid before triggering a job to subscribe all returns from minions
- chunk['jid'] = salt.utils.jid.gen_jid()
+ # Generate jid and find all minions before triggering a job to subscribe all returns from minions
+ chunk['jid'] = salt.utils.jid.gen_jid() if not chunk.get('jid', None) else chunk['jid']
+ minions = set(self.ckminions.check_minions(chunk['tgt'], chunk.get('tgt_type', 'glob')))
+
+ def subscribe_minion(minion):
+     salt_evt = self.application.event_listener.get_event(
+         self,
+         tag='salt/job/{}/ret/{}'.format(chunk['jid'], minion),
+         matcher=EventListener.exact_matcher)
+     syndic_evt = self.application.event_listener.get_event(
+         self,
+         tag='syndic/job/{}/ret/{}'.format(chunk['jid'], minion),
+         matcher=EventListener.exact_matcher)
+     return salt_evt, syndic_evt

# start listening for the event before we fire the job to avoid races
- events = [
-     self.application.event_listener.get_event(self, tag='salt/job/'+chunk['jid']),
-     self.application.event_listener.get_event(self, tag='syndic/job/'+chunk['jid']),
- ]
+ events = []
+ for minion in minions:
+     salt_evt, syndic_evt = subscribe_minion(minion)
+     events.append(salt_evt)
+     events.append(syndic_evt)

f_call = self._format_call_run_job_async(chunk)
# fire a job off
@@ -947,6 +960,12 @@ def _disbatch_local(self, chunk):
pass
raise tornado.gen.Return('No minions matched the target. No command was sent, no jid was assigned.')

+ # get_event for missing minion
+ for minion in list(set(pub_data['minions']) - set(minions)):
+     salt_evt, syndic_evt = subscribe_minion(minion)
+     events.append(salt_evt)
+     events.append(syndic_evt)

# Map of minion_id -> returned for all minions we think we need to wait on
minions = {m: False for m in pub_data['minions']}

@@ -1001,7 +1020,10 @@ def cancel_inflight_futures():
cancel_inflight_futures()
raise tornado.gen.Return(chunk_ret)
continue

f_result = f.result()
+ if f in events:
+     events.remove(f)
# if this is a start, then we need to add it to the pile
if f_result['tag'].endswith('/new'):
for minion_id in f_result['data']['minions']:
@@ -1011,7 +1033,6 @@ def cancel_inflight_futures():
chunk_ret[f_result['data']['id']] = f_result['data']['return']
# clear finished event future
minions[f_result['data']['id']] = True

# if there are no more minions to wait for, then we are done
if not more_todo() and min_wait_time.done():
cancel_inflight_futures()
@@ -1020,11 +1041,6 @@ def cancel_inflight_futures():
except TimeoutException:
pass

- if f == events[0]:
-     events[0] = self.application.event_listener.get_event(self, tag='salt/job/'+chunk['jid'])
- else:
-     events[1] = self.application.event_listener.get_event(self, tag='syndic/job/'+chunk['jid'])

@tornado.gen.coroutine
def job_not_running(self, jid, tgt, tgt_type, minions, is_finished):
'''
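Taken together, the saltnado change replaces the two broad salt/job/<jid> and syndic/job/<jid> subscriptions, which had to be re-created at the bottom of the event loop, with one exact-match listener per expected minion return (plus the syndic variant): the handler resolves the target's minions before publishing, subscribes to each return tag up front, adds listeners for any extra minions reported back by the publish, and removes each future from events once it fires. A short sketch of the per-minion tags involved, illustrative only (EventListener and its exact matcher live in saltnado):

# Illustrative only: build the exact return-event tags the handler now waits on.
def return_tags(jid, minions):
    '''One salt/ and one syndic/ return tag per expected minion.'''
    tags = []
    for minion in sorted(minions):
        tags.append('salt/job/{0}/ret/{1}'.format(jid, minion))     # direct minion return
        tags.append('syndic/job/{0}/ret/{1}'.format(jid, minion))   # return relayed by a syndic
    return tags


if __name__ == '__main__':
    for tag in return_tags('20181206123456789012', {'web01', 'db01'}):
        print(tag)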
2 changes: 1 addition & 1 deletion tests/support/case.py
@@ -502,7 +502,7 @@ def run_run(self, arg_str, with_retcode=False, catch_stderr=False, async=False,
arg_str,
with_retcode=with_retcode,
catch_stderr=catch_stderr,
- timeout=60)
+ timeout=timeout + 10)

def run_run_plus(self, fun, *arg, **kwargs):
'''
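run_run previously passed a hard-coded timeout=60 to the underlying call no matter what timeout the caller supplied; it now passes timeout + 10, presumably so the wrapper allows a little more time than the command it supervises instead of cutting off long runs at 60 seconds. A hypothetical sketch of that idea, not the test-suite API:

# Hypothetical helper, not the salt test-suite API: give the supervising call
# a small grace period on top of the command's own timeout.
def outer_timeout(requested, grace=10):
    return requested + grace


assert outer_timeout(30) == 40
assert outer_timeout(90) == 100   # the old fixed 60-second cap would have cut this off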
