Skip to content

Commit

Permalink
Update to Alfred-Workflow 1.29
Browse files Browse the repository at this point in the history
  • Loading branch information
deanishe committed Dec 1, 2017
1 parent 25ed139 commit d783fd1
Show file tree
Hide file tree
Showing 6 changed files with 129 additions and 86 deletions.
1 change: 1 addition & 0 deletions .gitignore
Expand Up @@ -60,6 +60,7 @@ Session.vim
# Packages
*.egg
*.egg-info/
*.dist-info/
dist/
build/
eggs/
Expand Down
Binary file not shown.
Binary file modified src/icon.png
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
92 changes: 46 additions & 46 deletions src/info.plist
Expand Up @@ -48,44 +48,23 @@
<dict>
<key>config</key>
<dict>
<key>concurrently</key>
<key>lastpathcomponent</key>
<false/>
<key>escaping</key>
<integer>102</integer>
<key>script</key>
<string>mode=$1
datadir="$alfred_workflow_data"
cachedir="$alfred_workflow_cache"
blacklist="${datadir}/blacklist.txt"
logfile="${cachedir}/net.deanishe.alfred.fixum.log"
# create data &amp; cache directories, logfile and blacklist
test -d "$cachedir" || mkdir -p "$cachedir"
test -f "$logfile" || touch "$logfile"
test -d "$datadir" || mkdir -p "$datadir"
test -f "$blacklist" || cp blacklist.default.txt "$blacklist"
# script actions
[[ "$mode" = dryrun ]] &amp;&amp; /usr/bin/python fixum.py --nothing
[[ "$mode" = fix ]] &amp;&amp; /usr/bin/python fixum.py
[[ "$mode" = blacklist ]] &amp;&amp; open "$blacklist"
[[ "$mode" = log ]] &amp;&amp; open -a Console "$logfile"
exit 0</string>
<key>scriptargtype</key>
<integer>1</integer>
<key>scriptfile</key>
<string></string>
<key>type</key>
<integer>5</integer>
<key>onlyshowifquerypopulated</key>
<true/>
<key>removeextension</key>
<false/>
<key>text</key>
<string>{query}</string>
<key>title</key>
<string>Fixum</string>
</dict>
<key>type</key>
<string>alfred.workflow.action.script</string>
<string>alfred.workflow.output.notification</string>
<key>uid</key>
<string>97033D94-9B6F-446C-94E5-AB677B5ABB4F</string>
<string>90302262-60E4-4C1C-AAEA-2A5C3F4C025A</string>
<key>version</key>
<integer>2</integer>
<integer>1</integer>
</dict>
<dict>
<key>config</key>
Expand Down Expand Up @@ -137,23 +116,44 @@ exit 0</string>
<dict>
<key>config</key>
<dict>
<key>lastpathcomponent</key>
<false/>
<key>onlyshowifquerypopulated</key>
<true/>
<key>removeextension</key>
<key>concurrently</key>
<false/>
<key>text</key>
<string>{query}</string>
<key>title</key>
<string>Fixum</string>
<key>escaping</key>
<integer>102</integer>
<key>script</key>
<string>mode=$1
datadir="$alfred_workflow_data"
cachedir="$alfred_workflow_cache"
blacklist="${datadir}/blacklist.txt"
logfile="${cachedir}/net.deanishe.alfred.fixum.log"
# create data &amp; cache directories, logfile and blacklist
test -d "$cachedir" || mkdir -p "$cachedir"
test -f "$logfile" || touch "$logfile"
test -d "$datadir" || mkdir -p "$datadir"
test -f "$blacklist" || cp blacklist.default.txt "$blacklist"
# script actions
[[ "$mode" = dryrun ]] &amp;&amp; /usr/bin/python fixum.py --nothing
[[ "$mode" = fix ]] &amp;&amp; /usr/bin/python fixum.py
[[ "$mode" = blacklist ]] &amp;&amp; open "$blacklist"
[[ "$mode" = log ]] &amp;&amp; open -a Console "$logfile"
exit 0</string>
<key>scriptargtype</key>
<integer>1</integer>
<key>scriptfile</key>
<string></string>
<key>type</key>
<integer>5</integer>
</dict>
<key>type</key>
<string>alfred.workflow.output.notification</string>
<string>alfred.workflow.action.script</string>
<key>uid</key>
<string>90302262-60E4-4C1C-AAEA-2A5C3F4C025A</string>
<string>97033D94-9B6F-446C-94E5-AB677B5ABB4F</string>
<key>version</key>
<integer>1</integer>
<integer>2</integer>
</dict>
</array>
<key>readme</key>
Expand Down Expand Up @@ -185,7 +185,7 @@ It is primarily a workaround to fix bugs that are preventing the workflows from
</dict>
</dict>
<key>version</key>
<string>0.6</string>
<string>0.7</string>
<key>webaddress</key>
<string></string>
</dict>
Expand Down
120 changes: 81 additions & 39 deletions src/workflow/background.py
Expand Up @@ -19,6 +19,7 @@

from __future__ import print_function, unicode_literals

import signal
import sys
import os
import subprocess
Expand Down Expand Up @@ -82,6 +83,31 @@ def _process_exists(pid):
return True


def _job_pid(name):
    """Return PID of job ``name`` or ``None`` if it isn't running.

    Reads the job's PID file; if the recorded process is still alive,
    its PID is returned. A stale PID file (process gone) is removed.

    Args:
        name (str): Name of job.

    Returns:
        int: PID of job process (or ``None`` if job doesn't exist).
    """
    path = _pid_file(name)
    if not os.path.exists(path):
        return None

    with open(path, 'rb') as fh:
        recorded = int(fh.read())

    if _process_exists(recorded):
        return recorded

    # Stale PID file: tidy it up so later checks short-circuit.
    try:
        os.unlink(path)
    except Exception:  # pragma: no cover
        pass
    return None


def is_running(name):
"""Test whether task ``name`` is currently running.
Expand All @@ -91,26 +117,18 @@ def is_running(name):
:rtype: bool
"""
pidfile = _pid_file(name)
if not os.path.exists(pidfile):
return False

with open(pidfile, 'rb') as file_obj:
pid = int(file_obj.read().strip())

if _process_exists(pid):
if _job_pid(name) is not None:
return True

elif os.path.exists(pidfile):
os.unlink(pidfile)

return False


def _background(stdin='/dev/null', stdout='/dev/null',
def _background(pidfile, stdin='/dev/null', stdout='/dev/null',
stderr='/dev/null'): # pragma: no cover
"""Fork the current process into a background daemon.
:param pidfile: file to write PID of daemon process to.
:type pidfile: filepath
:param stdin: where to read input
:type stdin: filepath
:param stdout: where to write stdout output
Expand All @@ -119,24 +137,31 @@ def _background(stdin='/dev/null', stdout='/dev/null',
:type stderr: filepath
"""
def _fork_and_exit_parent(errmsg):
def _fork_and_exit_parent(errmsg, wait=False, write=False):
try:
pid = os.fork()
if pid > 0:
if write: # write PID of child process to `pidfile`
tmp = pidfile + '.tmp'
with open(tmp, 'wb') as fp:
fp.write(str(pid))
os.rename(tmp, pidfile)
if wait: # wait for child process to exit
os.waitpid(pid, 0)
os._exit(0)
except OSError as err:
_log().critical('%s: (%d) %s', errmsg, err.errno, err.strerror)
raise err

# Do first fork.
_fork_and_exit_parent('fork #1 failed')
# Do first fork and wait for second fork to finish.
_fork_and_exit_parent('fork #1 failed', wait=True)

# Decouple from parent environment.
os.chdir(wf().workflowdir)
os.setsid()

# Do second fork.
_fork_and_exit_parent('fork #2 failed')
# Do second fork and write PID to pidfile.
_fork_and_exit_parent('fork #2 failed', write=True)

# Now I am a daemon!
# Redirect standard file descriptors.
Expand All @@ -151,10 +176,30 @@ def _fork_and_exit_parent(errmsg):
os.dup2(se.fileno(), sys.stderr.fileno())


def kill(name, sig=signal.SIGTERM):
    """Send signal ``sig`` to the process running job ``name``.

    .. versionadded:: 1.29

    Args:
        name (str): Name of the job.
        sig (int, optional): Signal to send (default: SIGTERM).

    Returns:
        bool: `False` if job isn't running, `True` if signal was sent.
    """
    job_pid = _job_pid(name)
    if job_pid is not None:
        os.kill(job_pid, sig)
        return True

    return False


def run_in_background(name, args, **kwargs):
r"""Cache arguments then call this script again via :func:`subprocess.call`.
:param name: name of task
:param name: name of job
:type name: unicode
:param args: arguments passed as first argument to :func:`subprocess.call`
:param \**kwargs: keyword arguments to :func:`subprocess.call`
Expand Down Expand Up @@ -183,18 +228,20 @@ def run_in_background(name, args, **kwargs):
argcache = _arg_cache(name)

# Cache arguments
with open(argcache, 'wb') as file_obj:
pickle.dump({'args': args, 'kwargs': kwargs}, file_obj)
with open(argcache, 'wb') as fp:
pickle.dump({'args': args, 'kwargs': kwargs}, fp)
_log().debug('[%s] command cached: %s', name, argcache)

# Call this script
cmd = ['/usr/bin/python', __file__, name]
_log().debug('[%s] passing job to background runner: %r', name, cmd)
retcode = subprocess.call(cmd)

if retcode: # pragma: no cover
_log().error('[%s] background runner failed with %d', retcode)
_log().error('[%s] background runner failed with %d', name, retcode)
else:
_log().debug('[%s] background job started', name)

return retcode


Expand All @@ -209,12 +256,17 @@ def main(wf): # pragma: no cover
name = wf.args[0]
argcache = _arg_cache(name)
if not os.path.exists(argcache):
log.critical('[%s] command cache not found: %r', name, argcache)
return 1
msg = '[{0}] command cache not found: {1}'.format(name, argcache)
log.critical(msg)
raise IOError(msg)

# Fork to background and run command
pidfile = _pid_file(name)
_background(pidfile)

# Load cached arguments
with open(argcache, 'rb') as file_obj:
data = pickle.load(file_obj)
with open(argcache, 'rb') as fp:
data = pickle.load(fp)

# Cached arguments
args = data['args']
Expand All @@ -223,28 +275,18 @@ def main(wf): # pragma: no cover
# Delete argument cache file
os.unlink(argcache)

pidfile = _pid_file(name)

# Fork to background
_background()

# Write PID to file
with open(pidfile, 'wb') as file_obj:
file_obj.write(str(os.getpid()))

# Run the command
try:
# Run the command
log.debug('[%s] running command: %r', name, args)

retcode = subprocess.call(args, **kwargs)

if retcode:
log.error('[%s] command failed with status %d', name, retcode)

finally:
if os.path.exists(pidfile):
os.unlink(pidfile)
log.debug('[%s] job complete', name)
os.unlink(pidfile)

log.debug('[%s] job complete', name)


if __name__ == '__main__': # pragma: no cover
Expand Down
2 changes: 1 addition & 1 deletion src/workflow/version
@@ -1 +1 @@
1.28.1
1.29

0 comments on commit d783fd1

Please sign in to comment.