X-Git-Url: https://git.madduck.net/etc/taskwarrior.git/blobdiff_plain/b0b1b863292eb7b5777cfb25a6bc5d1ab0b8282f..0b7eee0a63b5d15d2dbbba7d35cb9600c5b7bab1:/tasklib/task.py?ds=sidebyside

diff --git a/tasklib/task.py b/tasklib/task.py
index 38386ef..ce915bf 100644
--- a/tasklib/task.py
+++ b/tasklib/task.py
@@ -5,6 +5,7 @@ import json
 import logging
 import os
 import pytz
+import re
 import six
 import sys
 import subprocess
@@ -236,8 +237,15 @@ class SerializingObject(object):
         return ','.join(task['uuid'] for task in value)
 
     def deserialize_depends(self, raw_uuids):
-        raw_uuids = raw_uuids or ''  # Convert None to empty string
-        uuids = raw_uuids.split(',')
+        raw_uuids = raw_uuids or []  # Convert None to empty list
+
+        # TW 2.4.4 encodes list of dependencies as a single string
+        if type(raw_uuids) is not list:
+            uuids = raw_uuids.split(',')
+        # TW 2.4.5 and later exports them as a list, no conversion needed
+        else:
+            uuids = raw_uuids
+
         return set(self.warrior.tasks.get(uuid=uuid) for uuid in uuids if uuid)
 
     def datetime_normalizer(self, value):
@@ -297,7 +305,7 @@ class TaskResource(SerializingObject):
         # are not propagated.
         self._original_data = copy.deepcopy(self._data)
 
-    def _update_data(self, data, update_original=False):
+    def _update_data(self, data, update_original=False, remove_missing=False):
         """
         Low level update of the internal _data dict. Data which are coming as
         updates should already be serialized. If update_original is True, the
@@ -306,6 +314,11 @@ class TaskResource(SerializingObject):
         self._data.update(dict((key, self._deserialize(key, value))
                                for key, value in data.items()))
 
+        # In certain situations, we want to treat missing keys as removals
+        if remove_missing:
+            for key in set(self._data.keys()) - set(data.keys()):
+                self._data[key] = None
+
         if update_original:
             self._original_data = copy.deepcopy(self._data)
 
@@ -416,6 +429,18 @@ class Task(TaskResource):
         """
         pass
 
+    class ActiveTask(Exception):
+        """
+        Raised when the operation cannot be performed on the active task.
+        """
+        pass
+
+    class InactiveTask(Exception):
+        """
+        Raised when the operation cannot be performed on an inactive task.
+        """
+        pass
+
     class NotSaved(Exception):
         """
         Raised when the operation cannot be performed on the task, because
@@ -458,7 +483,8 @@ class Task(TaskResource):
         # If this is a on-modify event, we are provided with additional
         # line of input, which provides updated data
         if modify:
-            task._update_data(json.loads(input_file.readline().strip()))
+            task._update_data(json.loads(input_file.readline().strip()),
+                              remove_missing=True)
 
         return task
 
@@ -518,6 +544,10 @@ class Task(TaskResource):
     def pending(self):
         return self['status'] == six.text_type('pending')
 
+    @property
+    def active(self):
+        return self['start'] is not None
+
     @property
     def saved(self):
         return self['uuid'] is not None or self['id'] is not None
@@ -556,7 +586,7 @@ class Task(TaskResource):
         if self.warrior.version < VERSION_2_4_0:
             return self._data['description']
         else:
-            return "description:'{0}'".format(self._data['description'] or '')
+            return six.u("description:'{0}'").format(self._data['description'] or '')
 
     def delete(self):
         if not self.saved:
@@ -584,12 +614,29 @@ class Task(TaskResource):
             raise Task.CompletedTask("Cannot start a completed task")
         elif self.deleted:
             raise Task.DeletedTask("Deleted task cannot be started")
+        elif self.active:
+            raise Task.ActiveTask("Task is already active")
 
         self.warrior.execute_command([self['uuid'], 'start'])
 
         # Refresh the status again, so that we have updated info stored
         self.refresh(only_fields=['status', 'start'])
 
+    def stop(self):
+        if not self.saved:
+            raise Task.NotSaved("Task needs to be saved before it can be stopped")
+
+        # Refresh, and raise exception if task is already completed/deleted
+        self.refresh(only_fields=['status'])
+
+        if not self.active:
+            raise Task.InactiveTask("Cannot stop an inactive task")
+
+        self.warrior.execute_command([self['uuid'], 'stop'])
+
+        # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status', 'start'])
+
     def done(self):
         if not self.saved:
             raise Task.NotSaved("Task needs to be saved before it can be completed")
@@ -602,6 +649,10 @@ class Task(TaskResource):
         elif self.deleted:
             raise Task.DeletedTask("Deleted task cannot be completed")
 
+        # Older versions of TW do not stop active task at completion
+        if self.warrior.version < VERSION_2_4_0 and self.active:
+            self.stop()
+
         self.warrior.execute_command([self['uuid'], 'done'])
 
         # Refresh the status again, so that we have updated info stored
@@ -626,7 +677,13 @@ class Task(TaskResource):
                 "task: %s" % '\n'.join(id_lines))
 
         # Circumvent the ID storage, since ID is considered read-only
-        self._data['id'] = int(id_lines[0].split(' ')[2].rstrip('.'))
+        identifier = id_lines[0].split(' ')[2].rstrip('.')
+
+        # Identifier can be either ID or UUID for completed tasks
+        try:
+            self._data['id'] = int(identifier)
+        except ValueError:
+            self._data['uuid'] = identifier
 
         # Refreshing is very important here, as not only modification time
         # is updated, but arbitrary attribute may have changed due hooks
@@ -664,9 +721,9 @@ class Task(TaskResource):
         if serialized_value is '':
             escaped_serialized_value = ''
         else:
-            escaped_serialized_value = "'{0}'".format(serialized_value)
+            escaped_serialized_value = six.u("'{0}'").format(serialized_value)
 
-        format_default = lambda: "{0}:{1}".format(field,
+        format_default = lambda: six.u("{0}:{1}").format(field,
             escaped_serialized_value)
 
         format_func = getattr(self, 'format_{0}'.format(field),
@@ -740,7 +797,7 @@ class TaskFilter(SerializingObject):
         modifier = '.is' if value else '.none'
         key = key + modifier if '.' not in key else key
 
-        self.filter_params.append("{0}:{1}".format(key, value))
+        self.filter_params.append(six.u("{0}:{1}").format(key, value))
 
     def get_filter_params(self):
         return [f for f in self.filter_params if f]
@@ -875,9 +932,13 @@ class TaskWarrior(object):
             'confirmation': 'no',
             'dependency.confirmation': 'no',  # See TW-1483 or taskrc man page
             'recurrence.confirmation': 'no',  # Necessary for modifying R tasks
+
+            # Defaults to on since 2.4.5, we expect off during parsing
+            'json.array': 'off',
+
             # 2.4.3 onwards supports 0 as infite bulk, otherwise set just
             # arbitrary big number which is likely to be large enough
-            'bulk': 0 if self.version > VERSION_2_4_3 else 100000,
+            'bulk': 0 if self.version >= VERSION_2_4_3 else 100000,
         }
 
         # Set data.location override if passed via kwarg
@@ -885,7 +946,7 @@ class TaskWarrior(object):
             data_location = os.path.expanduser(data_location)
             if create and not os.path.exists(data_location):
                 os.makedirs(data_location)
-            self.config['data.location'] = data_location,
+            self.config['data.location'] = data_location
 
         self.tasks = TaskQuerySet(self)
 
@@ -895,7 +956,7 @@ class TaskWarrior(object):
         config.update(config_override)
         for item in config.items():
             command_args.append('rc.{0}={1}'.format(*item))
-        command_args.extend(map(str, args))
+        command_args.extend(map(six.text_type, args))
         return command_args
 
     def _get_version(self):
@@ -906,7 +967,24 @@ class TaskWarrior(object):
         stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
         return stdout.strip('\n')
 
-    def execute_command(self, args, config_override={}, allow_failure=True):
+    def get_config(self):
+        raw_output = self.execute_command(
+            ['show'],
+            config_override={'verbose': 'nothing'}
+        )
+
+        config = dict()
+        config_regex = re.compile(r'^(?P<key>[^\s]+)\s+(?P<value>[^\s].+$)')
+
+        for line in raw_output:
+            match = config_regex.match(line)
+            if match:
+                config[match.group('key')] = match.group('value').strip()
+
+        return config
+
+    def execute_command(self, args, config_override={}, allow_failure=True,
+                        return_all=False):
         command_args = self._get_command_args(
             args, config_override=config_override)
         logger.debug(' '.join(command_args))
@@ -919,7 +997,14 @@ class TaskWarrior(object):
             else:
                 error_msg = stdout.strip()
             raise TaskWarriorException(error_msg)
-        return stdout.rstrip().split('\n')
+
+        # Return all whole triplet only if explicitly asked for
+        if not return_all:
+            return stdout.rstrip().split('\n')
+        else:
+            return (stdout.rstrip().split('\n'),
+                    stderr.rstrip().split('\n'),
+                    p.returncode)
 
     def enforce_recurrence(self):
         # Run arbitrary report command which will trigger generation
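
For reference, the following is a rough usage sketch of the API surface this patch adds (the active property, Task.start()/Task.stop(), TaskWarrior.get_config() and the return_all flag of execute_command()). It is not part of the diff itself; the data location path and the task description below are placeholder values.

# Usage sketch only -- not part of the patch; paths and descriptions are placeholders.
from tasklib.task import Task, TaskWarrior

tw = TaskWarrior(data_location='~/.task', create=True)

task = Task(tw, description='Take out the trash')
task.save()

task.start()
assert task.active        # new property, True while the task has a 'start' time

try:
    task.start()          # starting an already active task now raises ActiveTask
except Task.ActiveTask:
    pass

task.stop()               # new method; raises Task.InactiveTask on inactive tasks
task.done()               # on TaskWarrior < 2.4.0, an active task is stopped first

# get_config() parses the output of 'task show' into a plain dict
config = tw.get_config()
print(config.get('data.location'))

# execute_command() can now also return stderr and the return code
stdout, stderr, code = tw.execute_command(['count'], return_all=True)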