diff --git a/tasklib/task.py b/tasklib/task.py
index 80f08e99080fe28a8cb69d563659a405e1f7443d..b6c3d09168ec6897749ade2c2d4ce26ceebf2b33 100644
--- a/tasklib/task.py
+++ b/tasklib/task.py
+from __future__ import print_function
 import copy
 import datetime
 import json
+import logging
 import os
+import six
 import subprocess
-import tempfile
-import uuid
-
 
 DATE_FORMAT = '%Y%m%dT%H%M%SZ'
-
 REPR_OUTPUT_SIZE = 10
-
 PENDING = 'pending'
+COMPLETED = 'completed'
+
+VERSION_2_1_0 = six.u('2.1.0')
+VERSION_2_2_0 = six.u('2.2.0')
+VERSION_2_3_0 = six.u('2.3.0')
+VERSION_2_4_0 = six.u('2.4.0')
+
+logger = logging.getLogger(__name__)
 
 
 class TaskWarriorException(Exception):
     pass
 
 
-class Task(object):
+class SerializingObject(object):
+    """
+    Common ancestor for TaskResource & TaskFilter, since they both
+    need to serialize arguments.
+    """
+
+    def _deserialize(self, key, value):
+        hydrate_func = getattr(self, 'deserialize_{0}'.format(key),
+                               lambda x: x if x != '' else None)
+        return hydrate_func(value)
+
+    def _serialize(self, key, value):
+        dehydrate_func = getattr(self, 'serialize_{0}'.format(key),
+                                 lambda x: x if x is not None else '')
+        return dehydrate_func(value)
+
+    def timestamp_serializer(self, date):
+        if not date:
+            return None
+        return date.strftime(DATE_FORMAT)
+
+    def timestamp_deserializer(self, date_str):
+        if not date_str:
+            return None
+        return datetime.datetime.strptime(date_str, DATE_FORMAT)
+
+    def serialize_entry(self, value):
+        return self.timestamp_serializer(value)
+
+    def deserialize_entry(self, value):
+        return self.timestamp_deserializer(value)
+
+    def serialize_modified(self, value):
+        return self.timestamp_serializer(value)
+
+    def deserialize_modified(self, value):
+        return self.timestamp_deserializer(value)
+
+    def serialize_due(self, value):
+        return self.timestamp_serializer(value)
+
+    def deserialize_due(self, value):
+        return self.timestamp_deserializer(value)
+
+    def serialize_scheduled(self, value):
+        return self.timestamp_serializer(value)
+
+    def deserialize_scheduled(self, value):
+        return self.timestamp_deserializer(value)
+
+    def serialize_until(self, value):
+        return self.timestamp_serializer(value)
+
+    def deserialize_until(self, value):
+        return self.timestamp_deserializer(value)
+
+    def serialize_wait(self, value):
+        return self.timestamp_serializer(value)
+
+    def deserialize_wait(self, value):
+        return self.timestamp_deserializer(value)
+
+    def deserialize_annotations(self, data):
+        return [TaskAnnotation(self, d) for d in data] if data else []
+
+    def serialize_tags(self, tags):
+        return ','.join(tags) if tags else ''
+
+    def deserialize_tags(self, tags):
+        if isinstance(tags, six.string_types):
+            return tags.split(',') if tags else []
+        return tags
+
+    def serialize_depends(self, cur_dependencies):
+        # Return the list of uuids
+        return ','.join(task['uuid'] for task in cur_dependencies)
+
+    def deserialize_depends(self, raw_uuids):
+        raw_uuids = raw_uuids or ''  # Convert None to empty string
+        uuids = raw_uuids.split(',')
+        return set(self.warrior.tasks.get(uuid=uuid) for uuid in uuids if uuid)
+
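The getattr-based dispatch in `_serialize`/`_deserialize` is the heart of this class: it looks up a `serialize_<field>`/`deserialize_<field>` method by name and otherwise falls back to a lambda that maps `None` and `''` onto each other. A minimal sketch of the round trip, assuming the module is importable as `tasklib.task`:

```python
# Minimal sketch of the per-field dispatch (assumes tasklib.task is importable).
import datetime
from tasklib.task import SerializingObject, DATE_FORMAT

obj = SerializingObject()
due = datetime.datetime(2015, 1, 31, 12, 0)

raw = obj._serialize('due', due)              # dispatches to serialize_due()
assert raw == due.strftime(DATE_FORMAT)       # '20150131T120000Z'
assert obj._deserialize('due', raw) == due    # dispatches to deserialize_due()
assert obj._serialize('project', None) == ''  # no serializer -> fallback lambda
```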
+
+class TaskResource(SerializingObject):
+    read_only_fields = []
+
+    def _load_data(self, data):
+        self._data = data
+        # We need to use a copy for original data, so that changes
+        # are not propagated. Shallow copy is alright, since data dict uses only
+        # primitive data types
+        self._original_data = data.copy()
+
+    def _update_data(self, data, update_original=False):
+        """
+        Low-level update of the internal _data dict. Data coming in as
+        updates should already be serialized. If update_original is True, the
+        original_data dict is updated as well.
+        """
+
+        self._data.update(data)
+
+        if update_original:
+            self._original_data.update(data)
+
+    def __getitem__(self, key):
+        # This is a workaround to make TaskResource non-iterable
+        # over simple index-based iteration
+        try:
+            int(key)
+            raise StopIteration
+        except ValueError:
+            pass
+
+        return self._deserialize(key, self._data.get(key))
+
+    def __setitem__(self, key, value):
+        if key in self.read_only_fields:
+            raise RuntimeError('Field \'%s\' is read-only' % key)
+        self._data[key] = self._serialize(key, value)
+
+    def __str__(self):
+        s = six.text_type(self.__unicode__())
+        if not six.PY3:
+            s = s.encode('utf-8')
+        return s
+
+    def __repr__(self):
+        return str(self)
+
+
+class TaskAnnotation(TaskResource):
+    read_only_fields = ['entry', 'description']
+
+    def __init__(self, task, data={}):
+        self.task = task
+        self._load_data(data)
+
+    def remove(self):
+        self.task.remove_annotation(self)
+
+    def __unicode__(self):
+        return self['description']
+
+    __repr__ = __unicode__
+
+
+class Task(TaskResource):
+    read_only_fields = ['id', 'entry', 'urgency', 'uuid', 'modified']
 
     class DoesNotExist(Exception):
         pass
 
-    def __init__(self, warrior, data={}):
+    class CompletedTask(Exception):
+        """
+        Raised when the operation cannot be performed on a completed task.
+        """
+        pass
+
+    class DeletedTask(Exception):
+        """
+        Raised when the operation cannot be performed on a deleted task.
+        """
+        pass
+
+    class NotSaved(Exception):
+        """
+        Raised when the operation cannot be performed on the task, because
+        it has not been saved to TaskWarrior yet.
+        """
+        pass
+
+    def __init__(self, warrior, **kwargs):
         self.warrior = warrior
-        self._data = data
 
-    def __getitem__(self, key):
-        return self._get_field(key)
+        # Check that user is not able to set read-only value in __init__
+        for key in kwargs.keys():
+            if key in self.read_only_fields:
+                raise RuntimeError('Field \'%s\' is read-only' % key)
 
-    def __setitem__(self, key, val):
-        self._data[key] = val
+        # We serialize the data in kwargs so that users of the library
+        # do not have to pass different data formats via __setitem__ and
+        # __init__ methods, which would be confusing
+
+        # Rather unfortunate syntax due to Python 2.6 compatibility
+        self._load_data(dict((key, self._serialize(key, value))
+                        for (key, value) in six.iteritems(kwargs)))
 
     def __unicode__(self):
-        return self._data.get('description')
+        return self['description']
+
+    def __eq__(self, other):
+        if self['uuid'] and other['uuid']:
+            # For saved Tasks, just define equality by equality of uuids
+            return self['uuid'] == other['uuid']
+        else:
+            # If the tasks are not saved, compare the actual instances
+            return id(self) == id(other)
+
+
+    def __hash__(self):
+        if self['uuid']:
+            # For saved Tasks, just define equality by equality of uuids
+            return self['uuid'].__hash__()
+        else:
+            # If the tasks are not saved, return hash of instance id
+            return id(self).__hash__()
+
+    @property
+    def _modified_fields(self):
+        writable_fields = set(self._data.keys()) - set(self.read_only_fields)
+        for key in writable_fields:
+            if self._data.get(key) != self._original_data.get(key):
+                yield key
+
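The `_original_data` snapshot taken in `_load_data` is what drives this change tracking: a writable field counts as modified when its current serialized value differs from the snapshot. A short sketch, where the `TaskWarrior` instance `tw` and the task contents are hypothetical:

```python
# Sketch of change tracking against the _original_data snapshot.
# `tw` and the task description are hypothetical.
task = tw.tasks.get(description='Buy milk')  # _load_data() takes the snapshot
task['priority'] = 'H'                       # written (serialized) into _data only
print(list(task._modified_fields))           # -> ['priority']
task.save()                                  # sends only the modified fields
```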
+    @property
+    def completed(self):
+        return self['status'] == six.text_type('completed')
+
+    @property
+    def deleted(self):
+        return self['status'] == six.text_type('deleted')
+
+    @property
+    def waiting(self):
+        return self['status'] == six.text_type('waiting')
+
+    @property
+    def pending(self):
+        return self['status'] == six.text_type('pending')
 
-    def _get_field(self, key):
-        hydrate_func = getattr(self, 'deserialize_{0}'.format(key), lambda x:x)
-        return hydrate_func(self._data.get(key))
+    @property
+    def saved(self):
+        return self['uuid'] is not None or self['id'] is not None
 
-    def _set_field(self, key, value):
-        dehydrate_func = getattr(self, 'serialize_{0}'.format(key), lambda x:x)
-        self._data[key] = dehydrate_func(value)
+    def serialize_depends(self, cur_dependencies):
+        # Check that all the tasks are saved
+        for task in cur_dependencies:
+            if not task.saved:
+                raise Task.NotSaved('Task \'%s\' needs to be saved before '
+                                    'it can be set as dependency.' % task)
 
-    def serialize_due(self, date):
-        return date.strftime(DATE_FORMAT)
+        return super(Task, self).serialize_depends(cur_dependencies)
 
-    def deserialize_due(self, date_str):
-        if not date_str:
-            return None
-        return datetime.datetime.strptime(date_str, DATE_FORMAT)
+    def format_depends(self):
+        # We need to generate lists of added and removed dependencies,
+        # since Taskwarrior does not accept redefining dependencies.
 
-    def serialize_annotations(self, annotations):
-        ann_list = list(annotations)
-        for ann in ann_list:
-            ann['entry'] = ann['entry'].strftime(DATE_FORMAT)
-        return ann_list
+        # This cannot be part of serialize_depends, since we need
+        # to keep a list of all dependencies in the _data dictionary,
+        # not just currently added/removed ones
 
-    def deserialize_annotations(self, annotations):
-        ann_list = list(annotations)
-        for ann in ann_list:
-            ann['entry'] = datetime.datetime.strptime(
-                ann['entry'], DATE_FORMAT)
-        return ann_list
+        old_dependencies_raw = self._original_data.get('depends','')
+        old_dependencies = self.deserialize_depends(old_dependencies_raw)
 
-    def regenerate_uuid(self):
-        self['uuid'] = str(uuid.uuid4())
+        added = self['depends'] - old_dependencies
+        removed = old_dependencies - self['depends']
+
+        # Removed dependencies need to be prefixed with '-'
+        return 'depends:' + ','.join(
+                [t['uuid'] for t in added] +
+                ['-' + t['uuid'] for t in removed]
+            )
+
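For example, if a saved task previously depended on tasks with hypothetical uuids aaa and bbb, and its depends set is changed to bbb and ccc, then added is {ccc} and removed is {aaa}, so the generated argument is depends:ccc,-aaa (up to set ordering): new dependencies are listed verbatim, removed ones are prefixed with '-'.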
+    def format_description(self):
+        # Taskwarrior versions older than 2.4.0 ignore the first word of the
+        # task description if the description: prefix is used
+        if self.warrior.version < VERSION_2_4_0:
+            return self._data['description']
+        else:
+            return "description:'{0}'".format(self._data['description'] or '')
 
     def delete(self):
-        self.warrior.delete_task(self['uuid'])
+        if not self.saved:
+            raise Task.NotSaved("Task needs to be saved before it can be deleted")
 
-    def done(self):
-        self.warrior.complete_task(self['uuid'])
+        # Refresh the status, and raise exception if the task is deleted
+        self.refresh(only_fields=['status'])
 
-    def save(self, delete_first=True):
-        if self['uuid'] and delete_first:
-            self.delete()
-        if not self['uuid'] or delete_first:
-            self.regenerate_uuid()
-        self.warrior.import_tasks([self._data])
+        if self.deleted:
+            raise Task.DeletedTask("Task was already deleted")
 
-    __repr__ = __unicode__
+        self.warrior.execute_command([self['uuid'], 'delete'])
+
+        # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status'])
 
 
-class TaskFilter(object):
+    def done(self):
+        if not self.saved:
+            raise Task.NotSaved("Task needs to be saved before it can be completed")
+
+        # Refresh, and raise exception if task is already completed/deleted
+        self.refresh(only_fields=['status'])
+
+        if self.completed:
+            raise Task.CompletedTask("Cannot complete a completed task")
+        elif self.deleted:
+            raise Task.DeletedTask("Deleted task cannot be completed")
+
+        self.warrior.execute_command([self['uuid'], 'done'])
+
+        # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status'])
+
+    def save(self):
+        args = [self['uuid'], 'modify'] if self.saved else ['add']
+        args.extend(self._get_modified_fields_as_args())
+        output = self.warrior.execute_command(args)
+
+        # Parse out the new ID, if the task is being added for the first time
+        if not self.saved:
+            id_lines = [l for l in output if l.startswith('Created task ')]
+
+            # Complain loudly if it seems that more tasks were created
+            # Should not happen
+            if len(id_lines) != 1 or len(id_lines[0].split(' ')) != 3:
+                raise TaskWarriorException("Unexpected output when creating "
+                                           "task: %s" % '\n'.join(id_lines))
+
+            # Circumvent the ID storage, since ID is considered read-only
+            self._data['id'] = int(id_lines[0].split(' ')[2].rstrip('.'))
+
+        self.refresh()
+
+    def add_annotation(self, annotation):
+        if not self.saved:
+            raise Task.NotSaved("Task needs to be saved to add annotation")
+
+        args = [self['uuid'], 'annotate', annotation]
+        self.warrior.execute_command(args)
+        self.refresh(only_fields=['annotations'])
+
+    def remove_annotation(self, annotation):
+        if not self.saved:
+            raise Task.NotSaved("Task needs to be saved to remove annotation")
+
+        if isinstance(annotation, TaskAnnotation):
+            annotation = annotation['description']
+        args = [self['uuid'], 'denotate', annotation]
+        self.warrior.execute_command(args)
+        self.refresh(only_fields=['annotations'])
+
+    def _get_modified_fields_as_args(self):
+        args = []
+
+        def add_field(field):
+            # Add the output of format_field method to args list (defaults to
+            # field:'value')
+            format_default = lambda k: "{0}:{1}".format(k,
+                                           "'{0}'".format(self._data[k])
+                                           if self._data[k] is not None
+                                           else '')
+            format_func = getattr(self, 'format_{0}'.format(field),
+                                  lambda: format_default(field))
+            args.append(format_func())
+
+        # If we're modifying a saved task, simply pass on all modified fields
+        if self.saved:
+            for field in self._modified_fields:
+                add_field(field)
+        # For new tasks, pass all fields that make sense
+        else:
+            for field in self._data.keys():
+                if field in self.read_only_fields:
+                    continue
+                add_field(field)
+
+        return args
+
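Put together, the arguments generated for a brand-new task look roughly like this (hypothetical values, Taskwarrior 2.4.0 or newer; dict ordering may vary):

```python
# Sketch of the argument list save() builds for a new task
# (hypothetical values, Taskwarrior >= 2.4.0).
task = Task(tw, description='Buy milk', project='Home')
task._get_modified_fields_as_args()
# -> ["description:'Buy milk'", "project:'Home'"]
# save() prepends ['add'] and hands the list to warrior.execute_command().
```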
+    def refresh(self, only_fields=[]):
+        # Raise error when trying to refresh a task that has not been saved
+        if not self.saved:
+            raise Task.NotSaved("Task needs to be saved to be refreshed")
+
+        # We need to use ID as backup for uuid here for the refreshes
+        # of newly saved tasks. Any other place in the code is fine
+        # with using UUID only.
+        args = [self['uuid'] or self['id'], 'export']
+        new_data = json.loads(self.warrior.execute_command(args)[0])
+        if only_fields:
+            to_update = dict(
+                [(k, new_data.get(k)) for k in only_fields])
+            self._update_data(to_update, update_original=True)
+        else:
+            self._load_data(new_data)
+
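Taken together, these methods replace the old add_task/complete_task/delete_task helpers on TaskWarrior with an object-level API. A usage sketch with hypothetical data location and task content:

```python
# Usage sketch of the reworked Task API (data location and content hypothetical).
from tasklib.task import TaskWarrior, Task

tw = TaskWarrior(data_location='~/.task-demo')
task = Task(tw, description='Write the changelog', tags=['docs'])
task.save()                         # `task add ...`; the new ID is parsed from the output
task.add_annotation('first draft')  # `task <uuid> annotate ...`, then a refresh
task.done()                         # refuses if the task is already completed or deleted
```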
+
+class TaskFilter(SerializingObject):
     """
     A set of parameters to filter the task list with.
     """
     """
     A set of parameters to filter the task list with.
     """
@@ -97,10 +425,29 @@ class TaskFilter(object):
 
     def add_filter_param(self, key, value):
         key = key.replace('__', '.')
-        self.filter_params.append('{0}:{1}'.format(key, value))
+
+        # Replace the value with empty string, since that is the
+        # convention in TW for empty values
+        attribute_key = key.split('.')[0]
+        value = self._serialize(attribute_key, value)
+
+        # If we are filtering by uuid:, do not use uuid keyword
+        # due to TW-1452 bug
+        if key == 'uuid':
+            self.filter_params.insert(0, value)
+        else:
+            # Surround the value with apostrophes unless it's an empty string
+            value = "'%s'" % value if value else ''
+
+            # We enforce equality match by using 'is' (or 'none') modifier
+            # Without using this syntax, filter fails due to TW-1479
+            modifier = '.is' if value else '.none'
+            key = key + modifier if '.' not in key else key
+
+            self.filter_params.append("{0}:{1}".format(key, value))
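The effect of these rules, with hypothetical filter values (evaluation is lazy; the command only runs when the queryset is iterated):

```python
# Sketch: how filter kwargs map to CLI filter arguments (hypothetical values).
tw.tasks.filter(project='Home')              # -> ... export -- project.is:'Home'
tw.tasks.filter(due=None)                    # -> ... export -- due.none:
tw.tasks.filter(uuid='1be8c3f7-98ae-4bd0')   # -> bare uuid placed first (TW-1452)
```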
 
     def get_filter_params(self):
-        return ['({})'.format(f) for f in self.filter_params if f]
+        return [f for f in self.filter_params if f]
 
     def clone(self):
         c = self.__class__()
@@ -123,8 +470,8 @@ class TaskQuerySet(object):
         Deep copy of a QuerySet doesn't populate the cache
         """
         obj = self.__class__()
-        for k,v in self.__dict__.items():
-            if k in ('_iter','_result_cache'):
+        for k, v in self.__dict__.items():
+            if k in ('_iter', '_result_cache'):
                 obj.__dict__[k] = None
             else:
                 obj.__dict__[k] = copy.deepcopy(v, memo)
@@ -175,7 +522,7 @@ class TaskQuerySet(object):
         """
         Fetch the tasks which match the current filters.
         """
         """
         Fetch the tasks which match the current filters.
         """
-        return self.warrior._execute_filter(self.filter_obj)
+        return self.warrior.filter_tasks(self.filter_obj)
 
     def all(self):
         """
@@ -186,6 +533,9 @@ class TaskQuerySet(object):
     def pending(self):
         return self.filter(status=PENDING)
 
+    def completed(self):
+        return self.filter(status=COMPLETED)
+
     def filter(self, *args, **kwargs):
         """
         Returns a new TaskQuerySet with the given filters added.
@@ -216,58 +566,69 @@ class TaskQuerySet(object):
 
 
 class TaskWarrior(object):
-    DEFAULT_FILTERS = {
-        'status': 'pending',
-    }
-
     def __init__(self, data_location='~/.task', create=True):
-        if not os.path.exists(data_location):
+        data_location = os.path.expanduser(data_location)
+        if create and not os.path.exists(data_location):
             os.makedirs(data_location)
         self.config = {
             'data.location': os.path.expanduser(data_location),
+            'confirmation': 'no',
+            'dependency.confirmation': 'no', # See TW-1483 or taskrc man page
         }
         self.tasks = TaskQuerySet(self)
+        self.version = self._get_version()
 
-    def _get_command_args(self, args):
+    def _get_command_args(self, args, config_override={}):
         command_args = ['task', 'rc:/']
-        for item in self.config.items():
+        config = self.config.copy()
+        config.update(config_override)
+        for item in config.items():
             command_args.append('rc.{0}={1}'.format(*item))
-        command_args.extend(args)
+        command_args.extend(map(str, args))
         return command_args
 
-    def _execute_command(self, args):
-        p = subprocess.Popen(self._get_command_args(args),
-                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        stdout, stderr = p.communicate()
+    def _get_version(self):
+        p = subprocess.Popen(
+                ['task', '--version'],
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE)
+        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
+        return stdout.strip('\n')
+
+    def execute_command(self, args, config_override={}):
+        command_args = self._get_command_args(
+            args, config_override=config_override)
+        logger.debug(' '.join(command_args))
+        p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
         if p.returncode:
-            raise TaskWarriorException(stderr.strip())
+            if stderr.strip():
+                error_msg = stderr.strip().splitlines()[-1]
+            else:
+                error_msg = stdout.strip()
+            raise TaskWarriorException(error_msg)
         return stdout.strip().split('\n')
 
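With the rc overrides in place, every invocation is a self-contained `task` call that ignores the user's own taskrc. For example (path and uuid hypothetical, override ordering may vary):

```python
# Sketch of the argv built by _get_command_args() (path and uuid hypothetical).
tw.execute_command(['1be8c3f7', 'delete'])
# -> ['task', 'rc:/', 'rc.data.location=/home/user/.task', 'rc.confirmation=no',
#     'rc.dependency.confirmation=no', '1be8c3f7', 'delete']
```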
-    def _execute_filter(self, filter_obj):
-        args = filter_obj.get_filter_params() + ['export']
+    def filter_tasks(self, filter_obj):
+        args = ['export', '--'] + filter_obj.get_filter_params()
         tasks = []
-        for line in self._execute_command(args):
+        for line in self.execute_command(args):
             if line:
-                tasks.append(Task(self, json.loads(line.strip(','))))
+                data = line.strip(',')
+                try:
+                    filtered_task = Task(self)
+                    filtered_task._load_data(json.loads(data))
+                    tasks.append(filtered_task)
+                except ValueError:
+                    raise TaskWarriorException('Invalid JSON: %s' % data)
         return tasks
 
-    def add_task(self, description, project=None):
-        args = ['add', description]
-        if project is not None:
-            args.append('project:{0}'.format(project))
-        self._execute_command(args)
-
-    def delete_task(self, task_id):
-        args = [task_id, 'rc.confirmation:no', 'delete']
-        self._execute_command(args)
-
-    def complete_task(self, task_id):
-        args = [task_id, 'done']
-        self._execute_command(args)
-
-    def import_tasks(self, tasks):
-        fd, path = tempfile.mkstemp()
-        with open(path, 'w') as f:
-            f.write(json.dumps(tasks))
-        args = ['import', path]
-        self._execute_command(args)
+    def merge_with(self, path, push=False):
+        path = path.rstrip('/') + '/'
+        self.execute_command(['merge', path], config_override={
+            'merge.autopush': 'yes' if push else 'no',
+        })
+
+    def undo(self):
+        self.execute_command(['undo'])
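The new merge_with and undo helpers wrap the corresponding Taskwarrior commands; a short sketch with a hypothetical remote path:

```python
# Sketch: merging from a remote replica and undoing the last change
# (the remote path is hypothetical).
tw = TaskWarrior(data_location='~/.task')
tw.merge_with('ssh://example.org/.task/', push=True)  # runs with rc.merge.autopush=yes
tw.undo()                                             # `task undo`
```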