X-Git-Url: https://git.madduck.net/etc/taskwarrior.git/blobdiff_plain/1fdf3d38f89946c7fee2efecc7d0e84c6eb82774..5c5b35a097608b63c3d54acf2f07d5cb00e85796:/tasklib/task.py

diff --git a/tasklib/task.py b/tasklib/task.py
index ce915bf..b7bba4b 100644
--- a/tasklib/task.py
+++ b/tasklib/task.py
@@ -5,34 +5,22 @@ import json
 import logging
 import os
 import pytz
-import re
 import six
 import sys
-import subprocess
 import tzlocal
 
+from backends import TaskWarrior, TaskWarriorException
+
 DATE_FORMAT = '%Y%m%dT%H%M%SZ'
 DATE_FORMAT_CALC = '%Y-%m-%dT%H:%M:%S'
 REPR_OUTPUT_SIZE = 10
 PENDING = 'pending'
 COMPLETED = 'completed'
 
-VERSION_2_1_0 = six.u('2.1.0')
-VERSION_2_2_0 = six.u('2.2.0')
-VERSION_2_3_0 = six.u('2.3.0')
-VERSION_2_4_0 = six.u('2.4.0')
-VERSION_2_4_1 = six.u('2.4.1')
-VERSION_2_4_2 = six.u('2.4.2')
-VERSION_2_4_3 = six.u('2.4.3')
-
 logger = logging.getLogger(__name__)
 local_zone = tzlocal.get_localzone()
 
 
-class TaskWarriorException(Exception):
-    pass
-
-
 class ReadOnlyDictView(object):
     """
     Provides simplified read-only view upon dict object.
@@ -392,9 +380,9 @@ class TaskResource(SerializingObject):
 class TaskAnnotation(TaskResource):
     read_only_fields = ['entry', 'description']
 
-    def __init__(self, task, data={}):
+    def __init__(self, task, data=None):
         self.task = task
-        self._load_data(data)
+        self._load_data(data or dict())
         super(TaskAnnotation, self).__init__(task.warrior)
 
     def remove(self):
@@ -598,7 +586,7 @@ class Task(TaskResource):
         if self.deleted:
             raise Task.DeletedTask("Task was already deleted")
 
-        self.warrior.execute_command([self['uuid'], 'delete'])
+        self.backend.delete_task(self)
 
         # Refresh the status again, so that we have updated info stored
         self.refresh(only_fields=['status', 'start', 'end'])
@@ -617,7 +605,7 @@ class Task(TaskResource):
         elif self.active:
             raise Task.ActiveTask("Task is already active")
 
-        self.warrior.execute_command([self['uuid'], 'start'])
+        self.backend.start_task(self)
 
         # Refresh the status again, so that we have updated info stored
         self.refresh(only_fields=['status', 'start'])
@@ -632,7 +620,7 @@ class Task(TaskResource):
         if not self.active:
             raise Task.InactiveTask("Cannot stop an inactive task")
 
-        self.warrior.execute_command([self['uuid'], 'stop'])
+        self.backend.stop_task(self)
 
         # Refresh the status again, so that we have updated info stored
         self.refresh(only_fields=['status', 'start'])
@@ -662,33 +650,8 @@ class Task(TaskResource):
         if self.saved and not self.modified:
             return
 
-        args = [self['uuid'], 'modify'] if self.saved else ['add']
-        args.extend(self._get_modified_fields_as_args())
-        output = self.warrior.execute_command(args)
-
-        # Parse out the new ID, if the task is being added for the first time
-        if not self.saved:
-            id_lines = [l for l in output if l.startswith('Created task ')]
-
-            # Complain loudly if it seems that more tasks were created
-            # Should not happen
-            if len(id_lines) != 1 or len(id_lines[0].split(' ')) != 3:
-                raise TaskWarriorException("Unexpected output when creating "
-                                           "task: %s" % '\n'.join(id_lines))
-
-            # Circumvent the ID storage, since ID is considered read-only
-            identifier = id_lines[0].split(' ')[2].rstrip('.')
-
-            # Identifier can be either ID or UUID for completed tasks
-            try:
-                self._data['id'] = int(identifier)
-            except ValueError:
-                self._data['uuid'] = identifier
-
-        # Refreshing is very important here, as not only modification time
-        # is updated, but arbitrary attribute may have changed due hooks
-        # altering the data before saving
-        self.refresh()
+        # All the actual work is done by the backend
+        self.backend.save_task(self)
 
     def add_annotation(self, annotation):
         if not self.saved:
@@ -744,7 +707,7 @@ class Task(TaskResource):
 
         return args
 
-    def refresh(self, only_fields=[]):
+    def refresh(self, only_fields=None, after_save=False):
         # Raise error when trying to refresh a task that has not been saved
         if not self.saved:
             raise Task.NotSaved("Task needs to be saved to be refreshed")
@@ -753,7 +716,39 @@ class Task(TaskResource):
         # of newly saved tasks. Any other place in the code is fine
         # with using UUID only.
         args = [self['uuid'] or self['id'], 'export']
-        new_data = json.loads(self.warrior.execute_command(args)[0])
+        output = self.warrior.execute_command(args)
+
+        def valid(output):
+            return len(output) == 1 and output[0].startswith('{')
+
+        # For older TW versions attempt to uniquely locate the task
+        # using the data we have if it has been just saved.
+        # This can happen when adding a completed task on older TW versions.
+        if (not valid(output) and self.warrior.version < VERSION_2_4_5
+                and after_save):
+
+            # Make a copy, removing ID and UUID. It's most likely invalid
+            # (ID 0) if it failed to match a unique task.
+            data = copy.deepcopy(self._data)
+            data.pop('id', None)
+            data.pop('uuid', None)
+
+            taskfilter = TaskFilter(self.warrior)
+            for key, value in data.items():
+                taskfilter.add_filter_param(key, value)
+
+            output = self.warrior.execute_command(['export', '--'] +
+                                                  taskfilter.get_filter_params())
+
+        # If more than 1 task has been matched still, raise an exception
+        if not valid(output):
+            raise TaskWarriorException(
+                "Unique identifiers {0} with description: {1} matches "
+                "multiple tasks: {2}".format(
+                    self['uuid'] or self['id'], self['description'], output)
+            )
+
+        new_data = json.loads(output[0])
         if only_fields:
             to_update = dict(
                 [(k, new_data.get(k)) for k in only_fields])
@@ -766,8 +761,8 @@ class TaskFilter(SerializingObject):
     A set of parameters to filter the task list with.
     """
 
-    def __init__(self, warrior, filter_params=[]):
-        self.filter_params = filter_params
+    def __init__(self, warrior, filter_params=None):
+        self.filter_params = filter_params or []
         super(TaskFilter, self).__init__(warrior)
 
     def add_filter(self, filter_str):
@@ -794,8 +789,10 @@ class TaskFilter(SerializingObject):
 
             # We enforce equality match by using 'is' (or 'none') modifier
             # Without using this syntax, filter fails due to TW-1479
-            modifier = '.is' if value else '.none'
-            key = key + modifier if '.' not in key else key
+            # which is, however, fixed in 2.4.5
+            if self.warrior.version < VERSION_2_4_5:
+                modifier = '.is' if value else '.none'
+                key = key + modifier if '.' not in key else key
 
             self.filter_params.append(six.u("{0}:{1}").format(key, value))
 
@@ -916,124 +913,3 @@ class TaskQuerySet(object):
         raise ValueError(
             'get() returned more than one Task -- it returned {0}! '
             'Lookup parameters were {1}'.format(num, kwargs))
-
-
-class TaskWarrior(object):
-    def __init__(self, data_location=None, create=True, taskrc_location='~/.taskrc'):
-        self.taskrc_location = os.path.expanduser(taskrc_location)
-
-        # If taskrc does not exist, pass / to use defaults and avoid creating
-        # dummy .taskrc file by TaskWarrior
-        if not os.path.exists(self.taskrc_location):
-            self.taskrc_location = '/'
-
-        self.version = self._get_version()
-        self.config = {
-            'confirmation': 'no',
-            'dependency.confirmation': 'no',  # See TW-1483 or taskrc man page
-            'recurrence.confirmation': 'no',  # Necessary for modifying R tasks
-
-            # Defaults to on since 2.4.5, we expect off during parsing
-            'json.array': 'off',
-
-            # 2.4.3 onwards supports 0 as infite bulk, otherwise set just
-            # arbitrary big number which is likely to be large enough
-            'bulk': 0 if self.version >= VERSION_2_4_3 else 100000,
-        }
-
-        # Set data.location override if passed via kwarg
-        if data_location is not None:
-            data_location = os.path.expanduser(data_location)
-            if create and not os.path.exists(data_location):
-                os.makedirs(data_location)
-            self.config['data.location'] = data_location
-
-        self.tasks = TaskQuerySet(self)
-
-    def _get_command_args(self, args, config_override={}):
-        command_args = ['task', 'rc:{0}'.format(self.taskrc_location)]
-        config = self.config.copy()
-        config.update(config_override)
-        for item in config.items():
-            command_args.append('rc.{0}={1}'.format(*item))
-        command_args.extend(map(six.text_type, args))
-        return command_args
-
-    def _get_version(self):
-        p = subprocess.Popen(
-            ['task', '--version'],
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE)
-        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
-        return stdout.strip('\n')
-
-    def get_config(self):
-        raw_output = self.execute_command(
-            ['show'],
-            config_override={'verbose': 'nothing'}
-        )
-
-        config = dict()
-        config_regex = re.compile(r'^(?P<key>[^\s]+)\s+(?P<value>[^\s].+$)')
-
-        for line in raw_output:
-            match = config_regex.match(line)
-            if match:
-                config[match.group('key')] = match.group('value').strip()
-
-        return config
-
-    def execute_command(self, args, config_override={}, allow_failure=True,
-                        return_all=False):
-        command_args = self._get_command_args(
-            args, config_override=config_override)
-        logger.debug(' '.join(command_args))
-        p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
-                             stderr=subprocess.PIPE)
-        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
-        if p.returncode and allow_failure:
-            if stderr.strip():
-                error_msg = stderr.strip()
-            else:
-                error_msg = stdout.strip()
-            raise TaskWarriorException(error_msg)
-
-        # Return all whole triplet only if explicitly asked for
-        if not return_all:
-            return stdout.rstrip().split('\n')
-        else:
-            return (stdout.rstrip().split('\n'),
-                    stderr.rstrip().split('\n'),
-                    p.returncode)
-
-    def enforce_recurrence(self):
-        # Run arbitrary report command which will trigger generation
-        # of recurrent tasks.
-
-        # Only necessary for TW up to 2.4.1, fixed in 2.4.2.
-        if self.version < VERSION_2_4_2:
-            self.execute_command(['next'], allow_failure=False)
-
-    def filter_tasks(self, filter_obj):
-        self.enforce_recurrence()
-        args = ['export', '--'] + filter_obj.get_filter_params()
-        tasks = []
-        for line in self.execute_command(args):
-            if line:
-                data = line.strip(',')
-                try:
-                    filtered_task = Task(self)
-                    filtered_task._load_data(json.loads(data))
-                    tasks.append(filtered_task)
-                except ValueError:
-                    raise TaskWarriorException('Invalid JSON: %s' % data)
-        return tasks
-
-    def merge_with(self, path, push=False):
-        path = path.rstrip('/') + '/'
-        self.execute_command(['merge', path], config_override={
-            'merge.autopush': 'yes' if push else 'no',
-        })
-
-    def undo(self):
-        self.execute_command(['undo'])
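
A minimal usage sketch of the call flow after this refactor, for orientation. It assumes the TaskWarrior class moved out of task.py keeps the constructor removed above (data_location, create, taskrc_location) and ends up importable from a tasklib.backends module; Task(tw, ...), tasks.pending() and the delegation comments come from the surrounding tasklib API and the hunks above, not from this patch alone, and the description string is illustrative.

    # Sketch under the assumptions stated above.
    from tasklib.task import Task, TaskWarriorException
    from tasklib.backends import TaskWarrior  # assumed import path after the split

    tw = TaskWarrior(data_location='~/.task', create=True)

    try:
        task = Task(tw, description='Review the backend refactor')
        task.save()    # save() now delegates to backend.save_task(task)
        task.start()   # start() delegates to backend.start_task(task)
        task.stop()    # stop() delegates to backend.stop_task(task)
        task.delete()  # delete() delegates to backend.delete_task(task)
    except TaskWarriorException as exc:
        # Raised when the underlying `task` invocation fails
        print('taskwarrior error: %s' % exc)

    # Queries still go through TaskWarrior.filter_tasks() via the queryset
    print(len(tw.tasks.pending()))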