diff --git a/Fakeum-1.1.alfredworkflow b/Fakeum-1.2.alfredworkflow
similarity index 82%
rename from Fakeum-1.1.alfredworkflow
rename to Fakeum-1.2.alfredworkflow
index f2fc622..aeb49ea 100644
Binary files a/Fakeum-1.1.alfredworkflow and b/Fakeum-1.2.alfredworkflow differ
diff --git a/src/fakeum.py b/src/fakeum.py
index df5f752..d278040 100644
--- a/src/fakeum.py
+++ b/src/fakeum.py
@@ -22,7 +22,7 @@
 from workflow import Workflow, ICON_WARNING, MATCH_ALL, MATCH_ALLCHARS
 from faker import Factory

-DELIMITER = '⟩'
+DELIMITER = '×'

 HELP_URL = 'https://github.com/deanishe/alfred-fakeum'
 UPDATE_SETTINGS = {'github_slug': 'deanishe/alfred-fakeum'}
diff --git a/src/version b/src/version
index b123147..ea710ab 100644
--- a/src/version
+++ b/src/version
@@ -1 +1 @@
-1.1
\ No newline at end of file
+1.2
\ No newline at end of file
diff --git a/src/workflow/__init__.py b/src/workflow/__init__.py
index 01a9cb3..5de1a96 100644
--- a/src/workflow/__init__.py
+++ b/src/workflow/__init__.py
@@ -69,40 +69,40 @@
 )

 __all__ = [
-    Workflow,
-    manager,
-    PasswordNotFound,
-    KeychainError,
-    ICON_ACCOUNT,
-    ICON_BURN,
-    ICON_CLOCK,
-    ICON_COLOR,
-    ICON_COLOUR,
-    ICON_EJECT,
-    ICON_ERROR,
-    ICON_FAVORITE,
-    ICON_FAVOURITE,
-    ICON_GROUP,
-    ICON_HELP,
-    ICON_HOME,
-    ICON_INFO,
-    ICON_NETWORK,
-    ICON_NOTE,
-    ICON_SETTINGS,
-    ICON_SWIRL,
-    ICON_SWITCH,
-    ICON_SYNC,
-    ICON_TRASH,
-    ICON_USER,
-    ICON_WARNING,
-    ICON_WEB,
-    MATCH_ALL,
-    MATCH_ALLCHARS,
-    MATCH_ATOM,
-    MATCH_CAPITALS,
-    MATCH_INITIALS,
-    MATCH_INITIALS_CONTAIN,
-    MATCH_INITIALS_STARTSWITH,
-    MATCH_STARTSWITH,
-    MATCH_SUBSTRING,
+    'Workflow',
+    'manager',
+    'PasswordNotFound',
+    'KeychainError',
+    'ICON_ACCOUNT',
+    'ICON_BURN',
+    'ICON_CLOCK',
+    'ICON_COLOR',
+    'ICON_COLOUR',
+    'ICON_EJECT',
+    'ICON_ERROR',
+    'ICON_FAVORITE',
+    'ICON_FAVOURITE',
+    'ICON_GROUP',
+    'ICON_HELP',
+    'ICON_HOME',
+    'ICON_INFO',
+    'ICON_NETWORK',
+    'ICON_NOTE',
+    'ICON_SETTINGS',
+    'ICON_SWIRL',
+    'ICON_SWITCH',
+    'ICON_SYNC',
+    'ICON_TRASH',
+    'ICON_USER',
+    'ICON_WARNING',
+    'ICON_WEB',
+    'MATCH_ALL',
+    'MATCH_ALLCHARS',
+    'MATCH_ATOM',
+    'MATCH_CAPITALS',
+    'MATCH_INITIALS',
+    'MATCH_INITIALS_CONTAIN',
+    'MATCH_INITIALS_STARTSWITH',
+    'MATCH_STARTSWITH',
+    'MATCH_SUBSTRING',
 ]
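
A note on the `__all__` fix above: `from workflow import *` looks the listed names up *by string*, so the old list of bare objects broke star-imports. A minimal sketch (module and names hypothetical):

    def public():
        return 'exported'

    def _private():
        return 'hidden'

    __all__ = ['public']  # correct: strings naming the public API
    # __all__ = [public] was the old bug: with objects in the list,
    # star-importing the module raises TypeError at import time,
    # because Python needs attribute *names*, not values
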
diff --git a/src/workflow/background.py b/src/workflow/background.py
index 7ed4df2..bcfa74d 100644
--- a/src/workflow/background.py
+++ b/src/workflow/background.py
@@ -23,8 +23,14 @@

 __all__ = ['is_running', 'run_in_background']

-wf = Workflow()
-log = wf.logger
+_wf = None
+
+
+def wf():
+    global _wf
+    if _wf is None:
+        _wf = Workflow()
+    return _wf


 def _arg_cache(name):
@@ -37,7 +43,7 @@ def _arg_cache(name):

     """

-    return wf.cachefile('{}.argcache'.format(name))
+    return wf().cachefile('{0}.argcache'.format(name))


 def _pid_file(name):
@@ -50,7 +56,7 @@ def _pid_file(name):

     """

-    return wf.cachefile('{}.pid'.format(name))
+    return wf().cachefile('{0}.pid'.format(name))


 def _process_exists(pid):
@@ -114,10 +120,11 @@ def _background(stdin='/dev/null', stdout='/dev/null',
         if pid > 0:
             sys.exit(0)  # Exit first parent.
     except OSError as e:
-        log.critical("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror))
+        wf().logger.critical("fork #1 failed: ({0:d}) {1}".format(
+            e.errno, e.strerror))
         sys.exit(1)
     # Decouple from parent environment.
-    os.chdir(wf.workflowdir)
+    os.chdir(wf().workflowdir)
     os.umask(0)
     os.setsid()
     # Do second fork.
@@ -126,7 +133,8 @@ def _background(stdin='/dev/null', stdout='/dev/null',
         if pid > 0:
             sys.exit(0)  # Exit second parent.
     except OSError as e:
-        log.critical("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror))
+        wf().logger.critical("fork #2 failed: ({0:d}) {1}".format(
+            e.errno, e.strerror))
         sys.exit(1)
     # Now I am a daemon!
     # Redirect standard file descriptors.
@@ -169,7 +177,7 @@ def run_in_background(name, args, **kwargs):

     """

     if is_running(name):
-        log.info('Task `{}` is already running'.format(name))
+        wf().logger.info('Task `{0}` is already running'.format(name))
         return

     argcache = _arg_cache(name)
@@ -177,16 +185,16 @@ def run_in_background(name, args, **kwargs):
     # Cache arguments
     with open(argcache, 'wb') as file_obj:
         pickle.dump({'args': args, 'kwargs': kwargs}, file_obj)
-    log.debug('Command arguments cached to `{}`'.format(argcache))
+    wf().logger.debug('Command arguments cached to `{0}`'.format(argcache))

     # Call this script
     cmd = ['/usr/bin/python', __file__, name]
-    log.debug('Calling {!r} ...'.format(cmd))
+    wf().logger.debug('Calling {0!r} ...'.format(cmd))
     retcode = subprocess.call(cmd)
     if retcode:  # pragma: no cover
-        log.error('Failed to call task in background')
+        wf().logger.error('Failed to call task in background')
     else:
-        log.debug('Executing task `{}` in background...'.format(name))
+        wf().logger.debug('Executing task `{0}` in background...'.format(name))
     return retcode

@@ -200,7 +208,7 @@ def main(wf):  # pragma: no cover
     name = wf.args[0]
     argcache = _arg_cache(name)
     if not os.path.exists(argcache):
-        log.critical('No arg cache found : {!r}'.format(argcache))
+        wf.logger.critical('No arg cache found : {0!r}'.format(argcache))
         return 1

     # Load cached arguments
@@ -221,23 +229,24 @@ def main(wf):  # pragma: no cover

     # Write PID to file
     with open(pidfile, 'wb') as file_obj:
-        file_obj.write('{}'.format(os.getpid()))
+        file_obj.write('{0}'.format(os.getpid()))

     # Run the command
     try:
-        log.debug('Task `{}` running'.format(name))
-        log.debug('cmd : {!r}'.format(args))
+        wf.logger.debug('Task `{0}` running'.format(name))
+        wf.logger.debug('cmd : {0!r}'.format(args))

         retcode = subprocess.call(args, **kwargs)

         if retcode:
-            log.error('Command failed with [{}] : {!r}'.format(retcode, args))
+            wf.logger.error('Command failed with [{0}] : {1!r}'.format(
+                retcode, args))

     finally:
         if os.path.exists(pidfile):
             os.unlink(pidfile)

-    log.debug('Task `{}` finished'.format(name))
+    wf.logger.debug('Task `{0}` finished'.format(name))


 if __name__ == '__main__':  # pragma: no cover
-    wf.run(main)
+    wf().run(main)
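
The change above replaces the module-level `Workflow()` (created as a side effect of merely importing `background.py`) with a lazily-initialised singleton. The pattern in isolation, with a hypothetical stand-in for `Workflow`:

    class ExpensiveThing(object):
        """Stand-in for Workflow: construction does I/O and logging setup."""
        def __init__(self):
            print('expensive setup runs once')

    _instance = None

    def get_instance():
        global _instance
        if _instance is None:            # first call constructs...
            _instance = ExpensiveThing()
        return _instance                 # ...later calls reuse it

    get_instance()  # prints 'expensive setup runs once'
    get_instance()  # silent
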
diff --git a/src/workflow/update.py b/src/workflow/update.py
index caa1125..b946e79 100644
--- a/src/workflow/update.py
+++ b/src/workflow/update.py
@@ -26,7 +26,6 @@

 import os
 import tempfile
-import argparse
 import re
 import subprocess

@@ -35,10 +34,18 @@

 # __all__ = []

-wf = workflow.Workflow()
-log = wf.logger

-RELEASES_BASE = 'https://api.github.com/repos/{}/releases'
+RELEASES_BASE = 'https://api.github.com/repos/{0}/releases'
+
+
+_wf = None
+
+
+def wf():
+    global _wf
+    if _wf is None:
+        _wf = workflow.Workflow()
+    return _wf


 class Version(object):
@@ -66,7 +73,7 @@ def _parse(self, vstr):
         else:
             m = self.match_version(vstr)
         if not m:
-            raise ValueError('Invalid version number: {}'.format(vstr))
+            raise ValueError('Invalid version number: {0}'.format(vstr))

         version, suffix = m.groups()
         parts = self._parse_dotted_string(version)
@@ -76,7 +83,7 @@ def _parse(self, vstr):
         if len(parts):
             self.patch = parts.pop(0)
         if not len(parts) == 0:
-            raise ValueError('Invalid version (too long) : {}'.format(vstr))
+            raise ValueError('Invalid version (too long) : {0}'.format(vstr))

         if suffix:
             # Build info
@@ -87,11 +94,11 @@ def _parse(self, vstr):
             if suffix:
                 if not suffix.startswith('-'):
                     raise ValueError(
-                        'Invalid suffix : `{}`. Must start with `-`'.format(
+                        'Invalid suffix : `{0}`. Must start with `-`'.format(
                             suffix))
                 self.suffix = suffix[1:]

-        # log.debug('version str `{}` -> {}'.format(vstr, repr(self)))
+        # wf().logger.debug('version str `{}` -> {}'.format(vstr, repr(self)))

     def _parse_dotted_string(self, s):
         """Parse string ``s`` into list of ints and strings"""
@@ -112,7 +119,7 @@ def tuple(self):

     def __lt__(self, other):
         if not isinstance(other, Version):
-            raise ValueError('Not a Version instance: {!r}'.format(other))
+            raise ValueError('Not a Version instance: {0!r}'.format(other))
         t = self.tuple[:3]
         o = other.tuple[:3]
         if t < o:
@@ -129,7 +136,7 @@ def __lt__(self, other):

     def __eq__(self, other):
         if not isinstance(other, Version):
-            raise ValueError('Not a Version instance: {!r}'.format(other))
+            raise ValueError('Not a Version instance: {0!r}'.format(other))
         return self.tuple == other.tuple

     def __ne__(self, other):
@@ -137,27 +144,27 @@ def __ne__(self, other):

     def __gt__(self, other):
         if not isinstance(other, Version):
-            raise ValueError('Not a Version instance: {!r}'.format(other))
+            raise ValueError('Not a Version instance: {0!r}'.format(other))
         return other.__lt__(self)

     def __le__(self, other):
         if not isinstance(other, Version):
-            raise ValueError('Not a Version instance: {!r}'.format(other))
+            raise ValueError('Not a Version instance: {0!r}'.format(other))
         return not other.__lt__(self)

     def __ge__(self, other):
         return not self.__lt__(other)

     def __str__(self):
-        vstr = '{}.{}.{}'.format(self.major, self.minor, self.patch)
+        vstr = '{0}.{1}.{2}'.format(self.major, self.minor, self.patch)
         if self.suffix:
-            vstr += '-{}'.format(self.suffix)
+            vstr += '-{0}'.format(self.suffix)
         if self.build:
-            vstr += '+{}'.format(self.build)
+            vstr += '+{0}'.format(self.build)
         return vstr

     def __repr__(self):
-        return "Version('{}')".format(str(self))
+        return "Version('{0}')".format(str(self))
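
A rough sketch of the ordering `Version` implements (semantic versioning, per the `tuple`/suffix logic above); assumes `workflow.update` is importable:

    from workflow.update import Version

    assert Version('1.10') > Version('1.9.1')    # numeric, not lexicographic
    assert Version('2.0-beta') < Version('2.0')  # pre-release < release
    assert Version('1.2') == Version('1.2.0')    # absent parts default to 0
    print(repr(Version('1.2-rc1')))              # -> Version('1.2.0-rc1')
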


 def download_workflow(url):
@@ -176,8 +183,9 @@

     local_path = os.path.join(tempfile.gettempdir(), filename)

-    log.debug('Downloading updated workflow from `{}` to `{}` ...'.format(url,
-              local_path))
+    wf().logger.debug(
+        'Downloading updated workflow from `{0}` to `{1}` ...'.format(
+            url, local_path))

     response = web.get(url)

@@ -196,7 +204,7 @@ def build_api_url(slug):

     """

     if len(slug.split('/')) != 2:
-        raise ValueError('Invalid GitHub slug : {}'.format(slug))
+        raise ValueError('Invalid GitHub slug : {0}'.format(slug))

     return RELEASES_BASE.format(slug)

@@ -219,15 +227,17 @@ def get_valid_releases(github_slug):
     api_url = build_api_url(github_slug)
     releases = []

-    log.debug('Retrieving releases list from `{}` ...'.format(api_url))
+    wf().logger.debug('Retrieving releases list from `{0}` ...'.format(
+        api_url))

     def retrieve_releases():
-        log.info('Retriving releases for `{}` ...'.format(github_slug))
+        wf().logger.info(
+            'Retrieving releases for `{0}` ...'.format(github_slug))
         return web.get(api_url).json()

     slug = github_slug.replace('/', '-')
-    for release in wf.cached_data('gh-releases-{}'.format(slug),
-                                  retrieve_releases):
+    for release in wf().cached_data('gh-releases-{0}'.format(slug),
+                                    retrieve_releases):
         version = release['tag_name']
         download_urls = []
         for asset in release.get('assets', []):
@@ -238,19 +248,19 @@ def retrieve_releases():

         # Validate release
         if release['prerelease']:
-            log.warning(
-                'Invalid release {} : pre-release detected'.format(version))
+            wf().logger.warning(
+                'Invalid release {0} : pre-release detected'.format(version))
             continue
         if not download_urls:
-            log.warning(
-                'Invalid release {} : No workflow file'.format(version))
+            wf().logger.warning(
+                'Invalid release {0} : No workflow file'.format(version))
             continue
         if len(download_urls) > 1:
-            log.warning(
-                'Invalid release {} : multiple workflow files'.format(version))
+            wf().logger.warning(
+                'Invalid release {0} : multiple workflow files'.format(version))
             continue

-        log.debug('Release `{}` : {}'.format(version, url))
+        wf().logger.debug('Release `{0}` : {1}'.format(version, url))
         releases.append({'version': version, 'download_url': download_urls[0]})

     return releases
@@ -272,10 +282,11 @@ def check_update(github_slug, current_version):

     releases = get_valid_releases(github_slug)

-    log.info('{} releases for {}'.format(len(releases), github_slug))
+    wf().logger.info('{0} releases for {1}'.format(len(releases),
+                                                   github_slug))

     if not len(releases):
-        raise ValueError('No valid releases for {}'.format(github_slug))
+        raise ValueError('No valid releases for {0}'.format(github_slug))

     # GitHub returns releases newest-first
     latest_release = releases[0]
@@ -283,10 +294,10 @@ def check_update(github_slug, current_version):
     # (latest_version, download_url) = get_latest_release(releases)
     vr = Version(latest_release['version'])
     vl = Version(current_version)
-    log.debug('Latest : {!r} Installed : {!r}'.format(vr, vl))
+    wf().logger.debug('Latest : {0!r} Installed : {1!r}'.format(vr, vl))
     if vr > vl:

-        wf.cache_data('__workflow_update_status', {
+        wf().cache_data('__workflow_update_status', {
             'version': latest_release['version'],
             'download_url': latest_release['download_url'],
             'available': True
@@ -294,7 +305,7 @@ def check_update(github_slug, current_version):

         return True

-    wf.cache_data('__workflow_update_status', {
+    wf().cache_data('__workflow_update_status', {
         'available': False
     })
     return False
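
Throughout the diff, `'{}'` placeholders become `'{0}'`. Presumably this is for Python 2.6 compatibility: auto-numbered format fields only arrived in 2.7. A sketch of the difference:

    print('{}.{}'.format(1, 13))    # Python >= 2.7 only; on 2.6 this raises
                                    # "ValueError: zero length field name in format"
    print('{0}.{1}'.format(1, 13))  # works on 2.6 and later
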
@@ -313,39 +324,40 @@ def install_update(github_slug, current_version):

     :returns: ``True`` if an update is installed, else ``False``

     """
+    # TODO: `github_slug` and `current_version` are both unused.

-    update_data = wf.cached_data('__workflow_update_status', max_age=0)
+    update_data = wf().cached_data('__workflow_update_status', max_age=0)
     if not update_data or not update_data.get('available'):
-        wf.logger.info('No update available')
+        wf().logger.info('No update available')
         return False

     local_file = download_workflow(update_data['download_url'])

-    log.info('Installing updated workflow ...')
+    wf().logger.info('Installing updated workflow ...')
     subprocess.call(['open', local_file])

     update_data['available'] = False
-    wf.cache_data('__workflow_update_status', update_data)
+    wf().cache_data('__workflow_update_status', update_data)
     return True


 if __name__ == '__main__':  # pragma: nocover
-    parser = argparse.ArgumentParser(
-        description='Check for and install updates')
-    parser.add_argument(
-        'action',
-        choices=['check', 'install'],
-        help='Check for new version or install new version?')
-    parser.add_argument(
-        'github_slug',
-        help='GitHub repo name in format "username/repo"')
-    parser.add_argument(
-        'version',
-        help='The version of the installed workflow')
-
-    args = parser.parse_args()
-    if args.action == 'check':
-        check_update(args.github_slug, args.version)
-    elif args.action == 'install':
-        install_update(args.github_slug, args.version)
+    import sys
+
+    def show_help():
+        print('Usage : update.py (check|install) github_slug version')
+        sys.exit(1)
+
+    if len(sys.argv) != 4:
+        show_help()
+
+    action, github_slug, version = sys.argv[1:]
+
+    if action not in ('check', 'install'):
+        show_help()
+
+    if action == 'check':
+        check_update(github_slug, version)
+    elif action == 'install':
+        install_update(github_slug, version)
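
The `argparse`-based CLI is replaced with a hand-rolled `sys.argv` parser, presumably for the same reason (`argparse` only entered the stdlib in Python 2.7). Usage of the new entry point, with an illustrative slug and version:

    # python update.py check deanishe/alfred-fakeum 1.1    -> check_update()
    # python update.py install deanishe/alfred-fakeum 1.1  -> install_update()
    # Any other argument count or action prints
    # 'Usage : update.py (check|install) github_slug version' and exits 1.
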
diff --git a/src/workflow/version b/src/workflow/version
index e33692a..9f76d37 100644
--- a/src/workflow/version
+++ b/src/workflow/version
@@ -1 +1 @@
-1.10.1
\ No newline at end of file
+1.13
\ No newline at end of file
diff --git a/src/workflow/web.py b/src/workflow/web.py
index 8b8e421..153833b 100644
--- a/src/workflow/web.py
+++ b/src/workflow/web.py
@@ -24,10 +24,11 @@
 import unicodedata
 import urllib
 import urllib2
+import urlparse
 import zlib


-USER_AGENT = u'alfred-workflow-0.1'
+USER_AGENT = u'Alfred-Workflow/1.11 (http://www.deanishe.net)'

 # Valid characters for multipart form data boundaries
 BOUNDARY_CHARS = string.digits + string.ascii_letters
@@ -215,7 +216,9 @@ def __init__(self, request):
                 self.url = err.geturl()
             # sometimes (e.g. when authentication fails)
             # urllib can't get a URL from an HTTPError
-            except AttributeError:
+            # This behaviour changes across Python versions,
+            # so no test cover (it isn't important).
+            except AttributeError:  # pragma: no cover
                 pass
             self.status_code = err.code
         else:
@@ -468,6 +471,8 @@ def request(method, url, params=None, data=None, headers=None, cookies=None,

     """

+    # TODO: cookies
+    # TODO: any way to force GET or POST?
     socket.setdefaulttimeout(timeout)

     # Default handlers
@@ -518,7 +523,17 @@ def request(method, url, params=None, data=None, headers=None, cookies=None,
         url = url.encode('utf-8')

     if params:  # GET args (POST args are handled in encode_multipart_formdata)
-        url = url + '?' + urllib.urlencode(str_dict(params))
+
+        scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
+
+        if query:  # Combine query string and `params`
+            url_params = urlparse.parse_qs(query)
+            # `params` take precedence over URL query string
+            url_params.update(params)
+            params = url_params
+
+        query = urllib.urlencode(str_dict(params), doseq=True)
+        url = urlparse.urlunsplit((scheme, netloc, path, query, fragment))

     req = urllib2.Request(url, data, headers)
     return Response(req)
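
The rewritten `params` handling merges query-string arguments instead of blindly appending a second `?`. A self-contained sketch using the same stdlib calls (URL illustrative):

    import urllib
    import urlparse

    url = 'http://example.com/search?q=old&lang=en'
    # Old behaviour appended '?q=new', yielding the malformed
    # 'http://example.com/search?q=old&lang=en?q=new'
    scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
    merged = urlparse.parse_qs(query)  # {'q': ['old'], 'lang': ['en']}
    merged.update({'q': 'new'})        # caller's params take precedence
    query = urllib.urlencode(merged, doseq=True)
    print(urlparse.urlunsplit((scheme, netloc, path, query, fragment)))
    # -> http://example.com/search?lang=en&q=new (key order may vary)
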
diff --git a/src/workflow/workflow.py b/src/workflow/workflow.py
index ce6506d..fe65a81 100644
--- a/src/workflow/workflow.py
+++ b/src/workflow/workflow.py
@@ -17,20 +17,25 @@

 from __future__ import print_function, unicode_literals

+import binascii
+from contextlib import contextmanager
+import cPickle
+import errno
+import json
+import logging
+import logging.handlers
 import os
-import sys
-import string
-import re
+import pickle
 import plistlib
-import subprocess
-import unicodedata
+import re
 import shutil
-import json
-import cPickle
-import pickle
+import signal
+import string
+import subprocess
+import sys
 import time
-import logging
-import logging.handlers
+import unicodedata
+
 try:
     import xml.etree.cElementTree as ET
 except ImportError:  # pragma: no cover
@@ -432,9 +437,13 @@

 ####################################################################
-# Keychain access errors
+# Lockfile and Keychain access errors
 ####################################################################

+class AcquisitionError(Exception):
+    """Raised if a lock cannot be acquired."""
+
+
 class KeychainError(Exception):
     """Raised by methods :meth:`Workflow.save_password`,
     :meth:`Workflow.get_password` and :meth:`Workflow.delete_password`
@@ -551,7 +560,7 @@ def unregister(self, name):

         """

         if name not in self._serializers:
-            raise ValueError('No such serializer registered : {}'.format(name))
+            raise ValueError('No such serializer registered : {0}'.format(name))

         serializer = self._serializers[name]
         del self._serializers[name]
@@ -734,13 +743,20 @@ def elem(self):

         """

+        # Attributes on <item> element
         attr = {}
         if self.valid:
             attr['valid'] = 'yes'
         else:
             attr['valid'] = 'no'
+        # Allow empty string for autocomplete. This is a useful value,
+        # as TABing the result will revert the query back to just the
+        # keyword
+        if self.autocomplete is not None:
+            attr['autocomplete'] = self.autocomplete
+
         # Optional attributes
-        for name in ('uid', 'type', 'autocomplete'):
+        for name in ('uid', 'type'):
             value = getattr(self, name, None)
             if value:
                 attr[name] = value
@@ -748,14 +764,18 @@ def elem(self):
         root = ET.Element('item', attr)
         ET.SubElement(root, 'title').text = self.title
         ET.SubElement(root, 'subtitle').text = self.subtitle
+
         # Add modifier subtitles
         for mod in ('cmd', 'ctrl', 'alt', 'shift', 'fn'):
             if mod in self.modifier_subtitles:
                 ET.SubElement(root, 'subtitle',
                               {'mod': mod}).text = self.modifier_subtitles[mod]

+        # Add arg as <arg> element instead of attribute on <item>, as it's more
+        # flexible (newlines aren't allowed in attributes)
         if self.arg:
             ET.SubElement(root, 'arg').text = self.arg
+
         # Add icon if there is one
         if self.icon:
             if self.icontype:
@@ -775,6 +795,153 @@ def elem(self):
         return root


+class LockFile(object):
+    """Context manager to create lock files"""
+
+    def __init__(self, protected_path, timeout=0, delay=0.05):
+        self.lockfile = protected_path + '.lock'
+        self.timeout = timeout
+        self.delay = delay
+        self._locked = False
+
+    @property
+    def locked(self):
+        """`True` if file is locked by this instance."""
+        return self._locked
+
+    def acquire(self, blocking=True):
+        """Acquire the lock if possible.
+
+        If the lock is in use and ``blocking`` is ``False``, return
+        ``False``.
+
+        Otherwise, check every `self.delay` seconds until it acquires
+        lock or exceeds `self.timeout` and raises an exception.
+
+        """
+        start = time.time()
+        while True:
+            try:
+                fd = os.open(self.lockfile, os.O_CREAT | os.O_EXCL | os.O_RDWR)
+                with os.fdopen(fd, 'w') as fd:
+                    fd.write('{0}'.format(os.getpid()))
+                break
+            except OSError as err:
+                if err.errno != errno.EEXIST:  # pragma: no cover
+                    raise
+                if self.timeout and (time.time() - start) >= self.timeout:
+                    raise AcquisitionError('Lock acquisition timed out.')
+                if not blocking:
+                    return False
+                time.sleep(self.delay)
+
+        self._locked = True
+        return True
+
+    def release(self):
+        """Release the lock by deleting `self.lockfile`."""
+        self._locked = False
+        os.unlink(self.lockfile)
+
+    def __enter__(self):
+        """Acquire lock."""
+        self.acquire()
+        return self
+
+    def __exit__(self, typ, value, traceback):
+        """Release lock."""
+        self.release()
+
+    def __del__(self):
+        """Clear up `self.lockfile`."""
+        if self._locked:  # pragma: no cover
+            self.release()
+
+
+@contextmanager
+def atomic_writer(file_path, mode):
+    """Atomic file writer.
+
+    :param file_path: path of file to write to.
+    :type file_path: ``unicode``
+    :param mode: same as for :func:`open`
+    :type mode: string
+
+    .. versionadded:: 1.12
+
+    Context manager that ensures the file is only written if the write
+    succeeds. The data is first written to a temporary file.
+
+    """
+
+    temp_suffix = '.aw.temp'
+    temp_file_path = file_path + temp_suffix
+    with open(temp_file_path, mode) as file_obj:
+        try:
+            yield file_obj
+            os.rename(temp_file_path, file_path)
+        finally:
+            try:
+                os.remove(temp_file_path)
+            except (OSError, IOError):
+                pass
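
How the two new helpers compose in practice, mirroring `Settings.save()` further down (hypothetical path; assumes the imports shown):

    import json
    from workflow.workflow import LockFile, atomic_writer

    path = '/tmp/example-settings.json'          # illustrative

    with LockFile(path):                         # creates path + '.lock'
        with atomic_writer(path, 'wb') as fp:    # writes path + '.aw.temp'
            json.dump({'key': 'value'}, fp)
    # os.rename() only replaces the real file once json.dump() succeeds;
    # an exception inside the block leaves the original file untouched.
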
+
+class uninterruptible(object):
+    """Decorator that postpones SIGTERM until wrapped function is complete.
+
+    .. versionadded:: 1.12
+
+    Since version 2.7, Alfred allows Script Filters to be killed. If
+    your workflow is killed in the middle of critical code (e.g.
+    writing data to disk), this may corrupt your workflow's data.
+
+    Use this decorator to wrap critical functions that *must* complete.
+    If the script is killed while a wrapped function is executing,
+    the SIGTERM will be caught and handled after your function has
+    finished executing.
+
+    Alfred-Workflow uses this internally to ensure its settings, data
+    and cache writes complete.
+
+    .. important::
+
+        This decorator is NOT thread-safe.
+
+    """
+
+    def __init__(self, func, class_name=''):
+        self.func = func
+        self._caught_signal = None
+
+    def signal_handler(self, signum, frame):
+        """Called when process receives SIGTERM."""
+        self._caught_signal = (signum, frame)
+
+    def __call__(self, *args, **kwargs):
+        self._caught_signal = None
+        # Register handler for SIGTERM, then call `self.func`
+        self.old_signal_handler = signal.getsignal(signal.SIGTERM)
+        signal.signal(signal.SIGTERM, self.signal_handler)
+
+        self.func(*args, **kwargs)
+
+        # Restore old signal handler
+        signal.signal(signal.SIGTERM, self.old_signal_handler)
+
+        # Handle any signal caught during execution
+        if self._caught_signal is not None:
+            signum, frame = self._caught_signal
+            if callable(self.old_signal_handler):
+                self.old_signal_handler(signum, frame)
+            elif self.old_signal_handler == signal.SIG_DFL:
+                sys.exit(0)
+
+    def __get__(self, obj=None, klass=None):
+        return self.__class__(self.func.__get__(obj, klass),
+                              klass.__name__)
+
+
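A hypothetical use of the decorator: a write that must not be cut short when Alfred kills the Script Filter:

    from workflow.workflow import uninterruptible

    @uninterruptible
    def save_state():
        with open('/tmp/state.bin', 'wb') as fp:  # illustrative path
            fp.write(b'data that must be written completely')

    save_state()
    # If SIGTERM arrives mid-call, signal_handler() merely records it;
    # the write finishes, the previous handler is restored, and only then
    # is the signal re-delivered (or the process exits via sys.exit(0)).
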
 class Settings(dict):
     """A dictionary that saves itself when changed.
@@ -826,9 +993,10 @@ def save(self):
         data = {}
         for key, value in self.items():
             data[key] = value
-        with open(self._filepath, 'wb') as file_obj:
-            json.dump(data, file_obj, sort_keys=True, indent=2,
-                      encoding='utf-8')
+        with LockFile(self._filepath):
+            with atomic_writer(self._filepath, 'wb') as file_obj:
+                json.dump(data, file_obj, sort_keys=True, indent=2,
+                          encoding='utf-8')

     # dict methods
     def __setitem__(self, key, value):
@@ -940,9 +1108,6 @@ def __init__(self, default_settings=None, update_settings=None,
         if libraries:
             sys.path = libraries + sys.path

-        if update_settings:
-            self.check_update()
-
     ####################################################################
     # API methods
     ####################################################################
@@ -1036,7 +1201,7 @@ def info(self):

     @property
     def bundleid(self):
-        """Workflow bundle ID from Alfred's environmental vars or ``info.plist``.
+        """Workflow bundle ID from environmental vars or ``info.plist``.

         :returns: bundle ID
         :rtype: ``unicode``
@@ -1135,7 +1300,7 @@ def args(self):
         # Handle magic args
         if len(args) and self._capture_args:
             for name in self.magic_arguments:
-                key = '{}{}'.format(self.magic_prefix, name)
+                key = '{0}{1}'.format(self.magic_prefix, name)
                 if key in args:
                     msg = self.magic_arguments[name]()
@@ -1374,6 +1539,8 @@ def settings(self):

         """

         if not self._settings:
+            self.logger.debug('Reading settings from `{0}` ...'.format(
+                self.settings_path))
             self._settings = Settings(self.settings_path,
                                       self._default_settings)
         return self._settings
@@ -1416,11 +1583,11 @@ def cache_serializer(self, serializer_name):

         if manager.serializer(serializer_name) is None:
             raise ValueError(
-                'Unknown serializer : `{}`. Register your serializer '
+                'Unknown serializer : `{0}`. Register your serializer '
                 'with `manager` first.'.format(serializer_name))

         self.logger.debug(
-            'default cache serializer set to `{}`'.format(serializer_name))
+            'default cache serializer set to `{0}`'.format(serializer_name))

         self._cache_serializer = serializer_name
@@ -1461,11 +1628,11 @@ def data_serializer(self, serializer_name):

         if manager.serializer(serializer_name) is None:
             raise ValueError(
-                'Unknown serializer : `{}`. Register your serializer '
+                'Unknown serializer : `{0}`. Register your serializer '
                 'with `manager` first.'.format(serializer_name))

         self.logger.debug(
-            'default data serializer set to `{}`'.format(serializer_name))
+            'default data serializer set to `{0}`'.format(serializer_name))

         self._data_serializer = serializer_name
@@ -1479,10 +1646,10 @@ def stored_data(self, name):

         """

-        metadata_path = self.datafile('.{}.alfred-workflow'.format(name))
+        metadata_path = self.datafile('.{0}.alfred-workflow'.format(name))

         if not os.path.exists(metadata_path):
-            self.logger.debug('No data stored for `{}`'.format(name))
+            self.logger.debug('No data stored for `{0}`'.format(name))
             return None

         with open(metadata_path, 'rb') as file_obj:
@@ -1492,18 +1659,18 @@ def stored_data(self, name):

         if serializer is None:
             raise ValueError(
-                'Unknown serializer `{}`. Register a corresponding serializer '
-                'with `manager.register()` to load this data.'.format(
-                    serializer_name))
+                'Unknown serializer `{0}`. Register a corresponding '
+                'serializer with `manager.register()` '
+                'to load this data.'.format(serializer_name))

-        self.logger.debug('Data `{}` stored in `{}` format'.format(
+        self.logger.debug('Data `{0}` stored in `{1}` format'.format(
             name, serializer_name))

-        filename = '{}.{}'.format(name, serializer_name)
+        filename = '{0}.{1}'.format(name, serializer_name)
         data_path = self.datafile(filename)

         if not os.path.exists(data_path):
-            self.logger.debug('No data stored for `{}`'.format(name))
+            self.logger.debug('No data stored for `{0}`'.format(name))
             if os.path.exists(metadata_path):
                 os.unlink(metadata_path)

@@ -1512,7 +1679,7 @@ def stored_data(self, name):
         with open(data_path, 'rb') as file_obj:
             data = serializer.load(file_obj)

-        self.logger.debug('Stored data loaded from : {}'.format(data_path))
+        self.logger.debug('Stored data loaded from : {0}'.format(data_path))

         return data
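
For context, a sketch of the behaviour the `save()` hunk above hardens: `Settings` is a dict that persists itself on every mutation (path and keys illustrative):

    from workflow.workflow import Settings

    s = Settings('/tmp/example-settings.json', {'count': 0})
    s['count'] = 1  # __setitem__ triggers save(), which now writes under
                    # LockFile + atomic_writer instead of a bare open()
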
@@ -1523,6 +1690,8 @@ def store_data(self, name, data, serializer=None):

         If ``data`` is ``None``, the datastore will be deleted.

+        Note that the datastore does NOT support multiple threads.
+
         :param name: name of datastore
         :param data: object(s) to store. **Note:** some serializers
             can only handled certain types of data.
@@ -1533,43 +1702,54 @@

         """

+        # Ensure deletion is not interrupted by SIGTERM
+        @uninterruptible
+        def delete_paths(paths):
+            """Clear one or more data stores"""
+            for path in paths:
+                if os.path.exists(path):
+                    os.unlink(path)
+                    self.logger.debug('Deleted data file : {0}'.format(path))
+
         serializer_name = serializer or self.data_serializer

-        if serializer_name == 'json' and name == 'settings':
+        # In order for `stored_data()` to be able to load data stored with
+        # an arbitrary serializer, yet still have meaningful file extensions,
+        # the format (i.e. extension) is saved to an accompanying file
+        metadata_path = self.datafile('.{0}.alfred-workflow'.format(name))
+        filename = '{0}.{1}'.format(name, serializer_name)
+        data_path = self.datafile(filename)
+
+        if data_path == self.settings_path:
             raise ValueError(
-                'Cannot save data to `settings` with format `json`. '
+                'Cannot save data to ' +
+                '`{0}` with format `{1}`. '.format(name, serializer_name) +
                 "This would overwrite Alfred-Workflow's settings file.")

         serializer = manager.serializer(serializer_name)

         if serializer is None:
             raise ValueError(
-                'Invalid serializer `{}`. Register your serializer with '
+                'Invalid serializer `{0}`. Register your serializer with '
                 '`manager.register()` first.'.format(serializer_name))

-        # In order for `stored_data()` to be able to load data stored with
-        # an arbitrary serializer, yet still have meaningful file extensions,
-        # the format (i.e. extension) is saved to an accompanying file
-        metadata_path = self.datafile('.{}.alfred-workflow'.format(name))
-        filename = '{}.{}'.format(name, serializer_name)
-        data_path = self.datafile(filename)
-
         if data is None:  # Delete cached data
-            for path in (metadata_path, data_path):
-                if os.path.exists(path):
-                    os.unlink(path)
-                    self.logger.debug('Deleted data file : {}'.format(path))
-
+            delete_paths((metadata_path, data_path))
             return

-        # Save file extension
-        with open(metadata_path, 'wb') as file_obj:
-            file_obj.write(serializer_name)
+        # Ensure write is not interrupted by SIGTERM
+        @uninterruptible
+        def _store():
+            # Save file extension
+            with atomic_writer(metadata_path, 'wb') as file_obj:
+                file_obj.write(serializer_name)

-        with open(data_path, 'wb') as file_obj:
-            serializer.dump(data, file_obj)
+            with atomic_writer(data_path, 'wb') as file_obj:
+                serializer.dump(data, file_obj)
+
+        _store()

-        self.logger.debug('Stored data saved at : {}'.format(data_path))
+        self.logger.debug('Stored data saved at : {0}'.format(data_path))

     def cached_data(self, name, data_func=None, max_age=60):
         """Retrieve data from cache or re-generate and re-cache data if
@@ -1628,7 +1808,7 @@ def cache_data(self, name, data):
             self.logger.debug('Deleted cache file : %s', cache_path)
             return

-        with open(cache_path, 'wb') as file_obj:
+        with atomic_writer(cache_path, 'wb') as file_obj:
             serializer.dump(data, file_obj)

         self.logger.debug('Cached data saved at : %s', cache_path)
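
A sketch of the datastore round-trip these methods implement (`'pickle'` is one of the built-in serializers; names illustrative):

    from workflow import Workflow

    wf = Workflow()
    wf.store_data('accounts', [{'name': 'Jane Fake'}], serializer='pickle')
    # -> writes accounts.pickle plus .accounts.alfred-workflow, which
    #    records the serializer name for stored_data() to look up later
    print(wf.stored_data('accounts'))
    wf.store_data('accounts', None)  # deletes both files, SIGTERM-safe
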
@@ -1717,19 +1897,30 @@ def filter(self, query, items, key=lambda x: x, ascending=False,
         By default, :meth:`filter` uses all of the following flags (i.e.
         :const:`MATCH_ALL`). The tests are always run in the given order:

-        1. :const:`MATCH_STARTSWITH` : Item search key startswith ``query`` (case-insensitive).
-        2. :const:`MATCH_CAPITALS` : The list of capital letters in item search key starts with ``query`` (``query`` may be lower-case). E.g., ``of`` would match ``OmniFocus``, ``gc`` would match ``Google Chrome``
-        3. :const:`MATCH_ATOM` : Search key is split into "atoms" on non-word characters (.,-,' etc.). Matches if ``query`` is one of these atoms (case-insensitive).
-        4. :const:`MATCH_INITIALS_STARTSWITH` : Initials are the first characters of the above-described "atoms" (case-insensitive).
-        5. :const:`MATCH_INITIALS_CONTAIN` : ``query`` is a substring of the above-described initials.
+        1. :const:`MATCH_STARTSWITH` : Item search key startswith
+           ``query`` (case-insensitive).
+        2. :const:`MATCH_CAPITALS` : The list of capital letters in item
+           search key starts with ``query`` (``query`` may be
+           lower-case). E.g., ``of`` would match ``OmniFocus``,
+           ``gc`` would match ``Google Chrome``
+        3. :const:`MATCH_ATOM` : Search key is split into "atoms" on
+           non-word characters (.,-,' etc.). Matches if ``query`` is
+           one of these atoms (case-insensitive).
+        4. :const:`MATCH_INITIALS_STARTSWITH` : Initials are the first
+           characters of the above-described "atoms" (case-insensitive).
+        5. :const:`MATCH_INITIALS_CONTAIN` : ``query`` is a substring of
+           the above-described initials.
         6. :const:`MATCH_INITIALS` : Combination of (4) and (5).
-        7. :const:`MATCH_SUBSTRING` : Match if ``query`` is a substring of item search key (case-insensitive).
-        8. :const:`MATCH_ALLCHARS` : Matches if all characters in ``query`` appear in item search key in the same order (case-insensitive).
+        7. :const:`MATCH_SUBSTRING` : Match if ``query`` is a substring
+           of item search key (case-insensitive).
+        8. :const:`MATCH_ALLCHARS` : Matches if all characters in
+           ``query`` appear in item search key in the same order
+           (case-insensitive).
         9. :const:`MATCH_ALL` : Combination of all the above.

-        :const:`MATCH_ALLCHARS` is considerably slower than the other tests and
-        provides much less accurate results.
+        :const:`MATCH_ALLCHARS` is considerably slower than the other
+        tests and provides much less accurate results.

         **Examples:**

@@ -1804,12 +1995,12 @@ def filter(self, query, items, key=lambda x: x, ascending=False,
         results.sort(reverse=ascending)
         results = [t[1] for t in results]

-        if max_results and len(results) > max_results:
-            results = results[:max_results]
-
         if min_score:
             results = [r for r in results if r[1] > min_score]

+        if max_results and len(results) > max_results:
+            results = results[:max_results]
+
         # return list of ``(item, score, rule)``
         if include_score:
             return results
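
A sketch of the documented rules (data illustrative), plus the point of the swapped hunks above: `min_score` now prunes *before* `max_results` truncates, so weak matches can no longer crowd out better ones:

    from workflow import Workflow, MATCH_CAPITALS, MATCH_STARTSWITH

    wf = Workflow()
    apps = ['OmniFocus', 'Google Chrome', 'GoToMeeting']
    # 'gc' hits 'Google Chrome' via its capitals 'GC' (rule 2)
    print(wf.filter('gc', apps, match_on=MATCH_STARTSWITH | MATCH_CAPITALS))
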
@@ -1930,7 +2121,9 @@ def run(self, func):
         :param func: Callable to call with ``self`` (i.e. the :class:`Workflow`
             instance) as first argument.

-        ``func`` will be called with :class:`Workflow` instance as first argument.
+        ``func`` will be called with :class:`Workflow` instance as first
+        argument.
+
         ``func`` should be the main entry point to your workflow.

         Any exceptions raised will be logged and an error message will be
@@ -1940,18 +2133,32 @@ def run(self, func):

         start = time.time()

+        # Call workflow's entry function/method within a try-except block
+        # to catch any errors and display an error message in Alfred
         try:
             if self.version:
-                self.logger.debug('Workflow version : {}'.format(self.version))
+                self.logger.debug('Workflow version : {0}'.format(self.version))
+
+            # Run update check if configured for self-updates.
+            # This call has to go in the `run` try-except block, as it will
+            # initialise `self.settings`, which will raise an exception
+            # if `settings.json` isn't valid.
+            if self._update_settings:
+                self.check_update()
+
+            # Run workflow's entry function/method
             func(self)
+
             # Set last version run to current version after a successful
             # run
             self.set_last_version()
+
         except Exception as err:
             self.logger.exception(err)
             if self.help_url:
                 self.logger.info(
-                    'For assistance, see: {}'.format(self.help_url))
+                    'For assistance, see: {0}'.format(self.help_url))
             if not sys.stdout.isatty():  # Show error in Alfred
                 self._items = []
                 if self._name:
@@ -1965,7 +2172,7 @@ def run(self, func):
                 self.send_feedback()
             return 1
         finally:
-            self.logger.debug('Workflow finished in {:0.3f} seconds.'.format(
+            self.logger.debug('Workflow finished in {0:0.3f} seconds.'.format(
                 time.time() - start))

         return 0
@@ -2090,7 +2297,7 @@ def last_version_run(self):

             self._last_version_run = version

-        self.logger.debug('Last run version : {}'.format(
+        self.logger.debug('Last run version : {0}'.format(
             self._last_version_run))

         return self._last_version_run
@@ -2121,7 +2328,7 @@ def set_last_version(self, version=None):

         self.settings['__workflow_last_version'] = str(version)

-        self.logger.debug('Set last run version : {}'.format(version))
+        self.logger.debug('Set last run version : {0}'.format(version))

         return True
@@ -2139,7 +2346,7 @@ def update_available(self):

         """

         update_data = self.cached_data('__workflow_update_status', max_age=0)
-        self.logger.debug('update_data : {}'.format(update_data))
+        self.logger.debug('update_data : {0}'.format(update_data))

         if not update_data or not update_data.get('available'):
             return False
@@ -2291,8 +2498,23 @@ def get_password(self, account, service=None):
         if not service:
             service = self.bundleid

-        password = self._call_security('find-generic-password', service,
-                                       account, '-w')
+        output = self._call_security('find-generic-password', service,
+                                     account, '-g')
+
+        # Parsing of `security` output is adapted from python-keyring
+        # by Jason R. Coombs
+        # https://pypi.python.org/pypi/keyring
+        m = re.search(
+            r'password:\s*(?:0x(?P<hex>[0-9A-F]+)\s*)?(?:"(?P<pw>.*)")?',
+            output)
+
+        if m:
+            groups = m.groupdict()
+            h = groups.get('hex')
+            password = groups.get('pw')
+            if h:
+                password = unicode(binascii.unhexlify(h), 'utf-8')
+
         self.logger.debug('Got password : %s:%s', service, account)

         return password
@@ -2323,6 +2545,7 @@ def delete_password(self, account, service=None):

     def _register_default_magic(self):
         """Register the built-in magic arguments"""
+        # TODO: refactor & simplify

         # Wrap callback and message with callable
         def callback(func, msg):
@@ -2398,7 +2621,7 @@ def do_help():

         def show_version():
             if self.version:
-                return 'Version: {}'.format(self.version)
+                return 'Version: {0}'.format(self.version)
             else:
                 return 'This workflow has no version number'

@@ -2408,7 +2631,7 @@ def list_magic():
             for name in sorted(self.magic_arguments.keys()):
                 if name == 'magic':
                     continue
-                arg = '{}{}'.format(self.magic_prefix, name)
+                arg = '{0}{1}'.format(self.magic_prefix, name)
                 self.logger.debug(arg)

                 if not isatty: