git.madduck.net Git - etc/taskwarrior.git/blobdiff - tasklib/task.py


Bump version for next release
[etc/taskwarrior.git] / tasklib / task.py
index 5e2b71b66a48d4b87e6ad8c27ccbb932bb76ace4..6fbce776d658bef73b250983d86042273cb28e57 100644 (file)
+from __future__ import print_function
+import copy
+import datetime
 import json
+import logging
 import os
+import six
 import subprocess
-import tempfile
-import uuid
-
 
 
+DATE_FORMAT = '%Y%m%dT%H%M%SZ'
+REPR_OUTPUT_SIZE = 10
 PENDING = 'pending'
+COMPLETED = 'completed'
+
+logger = logging.getLogger(__name__)
 
 
 class TaskWarriorException(Exception):
     pass
 
 
-class Task(object):
+class TaskResource(object):
+    read_only_fields = []
 
 
-    def __init__(self, warrior, data={}):
-        self.warrior = warrior
+    def _load_data(self, data):
         self._data = data
 
     def __getitem__(self, key):
-        return self._data.get(key)
+        hydrate_func = getattr(self, 'deserialize_{0}'.format(key),
+                               lambda x: x)
+        return hydrate_func(self._data.get(key))
+
+    def __setitem__(self, key, value):
+        if key in self.read_only_fields:
+            raise RuntimeError('Field \'%s\' is read-only' % key)
+        dehydrate_func = getattr(self, 'serialize_{0}'.format(key),
+                                 lambda x: x)
+        self._data[key] = dehydrate_func(value)
+        self._modified_fields.add(key)
+
+    def __str__(self):
+        s = six.text_type(self.__unicode__())
+        if not six.PY3:
+            s = s.encode('utf-8')
+        return s
+
+
+class TaskAnnotation(TaskResource):
+    read_only_fields = ['entry', 'description']
 
 
-    def __setitem__(self, key, val):
-        self._data[key] = val
+    def __init__(self, task, data={}):
+        self.task = task
+        self._load_data(data)
+
+    def deserialize_entry(self, data):
+        return datetime.datetime.strptime(data, DATE_FORMAT) if data else None
+
+    def serialize_entry(self, date):
+        return date.strftime(DATE_FORMAT) if date else ''
+
+    def remove(self):
+        self.task.remove_annotation(self)
 
     def __unicode__(self):
-        return self._data.get('description')
+        return self['description']
+
+    __repr__ = __unicode__
+
 
 
-    def regenerate_uuid(self):
-        self['uuid'] = str(uuid.uuid4())
+class Task(TaskResource):
+    read_only_fields = ['id', 'entry', 'urgency']
+
+    class DoesNotExist(Exception):
+        pass
+
+    def __init__(self, warrior, data={}):
+        self.warrior = warrior
+        self._load_data(data)
+        self._modified_fields = set()
+
+    def __unicode__(self):
+        return self['description']
+
+    def serialize_due(self, date):
+        return date.strftime(DATE_FORMAT)
+
+    def deserialize_due(self, date_str):
+        if not date_str:
+            return None
+        return datetime.datetime.strptime(date_str, DATE_FORMAT)
+
+    def deserialize_annotations(self, data):
+        return [TaskAnnotation(self, d) for d in data] if data else []
+
+    def deserialize_tags(self, tags):
+        if isinstance(tags, six.string_types):
+            return tags.split(',') if tags else []
+        return tags
+
+    def serialize_tags(self, tags):
+        return ','.join(tags) if tags else ''
 
     def delete(self):
-        self.warrior.delete_task(self['uuid'])
+        self.warrior.execute_command([self['id'], 'delete'], config_override={
+            'confirmation': 'no',
+        })
 
     def done(self):
-        self.warrior.complete_task(self['uuid'])
+        self.warrior.execute_command([self['id'], 'done'])
 
 
-    def save(self, delete_first=True):
-        if self['uuid'] and delete_first:
-            self.delete()
-        if not self['uuid'] or delete_first:
-            self.regenerate_uuid()
-        self.warrior.import_tasks([self._data])
+    def save(self):
+        args = [self['id'], 'modify'] if self['id'] else ['add']
+        args.extend(self._get_modified_fields_as_args())
+        self.warrior.execute_command(args)
+        self._modified_fields.clear()
 
 
-    __repr__ = __unicode__
+    def add_annotation(self, annotation):
+        args = [self['id'], 'annotate', annotation]
+        self.warrior.execute_command(args)
+        self.refresh(only_fields=['annotations'])
 
 
+    def remove_annotation(self, annotation):
+        if isinstance(annotation, TaskAnnotation):
+            annotation = annotation['description']
+        args = [self['id'], 'denotate', annotation]
+        self.warrior.execute_command(args)
+        self.refresh(only_fields=['annotations'])
 
 
-class TaskWarrior(object):
-    DEFAULT_FILTERS = {
-        'status': 'pending',
-    }
+    def _get_modified_fields_as_args(self):
+        args = []
+        for field in self._modified_fields:
+            args.append('{0}:{1}'.format(field, self._data[field]))
+        return args
+
+    def refresh(self, only_fields=[]):
+        args = [self['uuid'], 'export']
+        new_data = json.loads(self.warrior.execute_command(args)[0])
+        if only_fields:
+            to_update = dict(
+                [(k, new_data.get(k)) for k in only_fields])
+            self._data.update(to_update)
+        else:
+            self._data = new_data
+
+
+class TaskFilter(object):
+    """
+    A set of parameters to filter the task list with.
+    """
+
+    def __init__(self, filter_params=[]):
+        self.filter_params = filter_params
 
 
+    def add_filter(self, filter_str):
+        self.filter_params.append(filter_str)
+
+    def add_filter_param(self, key, value):
+        key = key.replace('__', '.')
+        self.filter_params.append('{0}:{1}'.format(key, value))
+
+    def get_filter_params(self):
+        return [f for f in self.filter_params if f]
+
+    def clone(self):
+        c = self.__class__()
+        c.filter_params = list(self.filter_params)
+        return c
+
+
+class TaskQuerySet(object):
+    """
+    Represents a lazy lookup for a set of task objects.
+    """
+
+    def __init__(self, warrior=None, filter_obj=None):
+        self.warrior = warrior
+        self._result_cache = None
+        self.filter_obj = filter_obj or TaskFilter()
+
+    def __deepcopy__(self, memo):
+        """
+        Deep copy of a QuerySet doesn't populate the cache
+        """
+        obj = self.__class__()
+        for k, v in self.__dict__.items():
+            if k in ('_iter', '_result_cache'):
+                obj.__dict__[k] = None
+            else:
+                obj.__dict__[k] = copy.deepcopy(v, memo)
+        return obj
+
+    def __repr__(self):
+        data = list(self[:REPR_OUTPUT_SIZE + 1])
+        if len(data) > REPR_OUTPUT_SIZE:
+            data[-1] = "...(remaining elements truncated)..."
+        return repr(data)
+
+    def __len__(self):
+        if self._result_cache is None:
+            self._result_cache = list(self)
+        return len(self._result_cache)
+
+    def __iter__(self):
+        if self._result_cache is None:
+            self._result_cache = self._execute()
+        return iter(self._result_cache)
+
+    def __getitem__(self, k):
+        if self._result_cache is None:
+            self._result_cache = list(self)
+        return self._result_cache.__getitem__(k)
+
+    def __bool__(self):
+        if self._result_cache is not None:
+            return bool(self._result_cache)
+        try:
+            next(iter(self))
+        except StopIteration:
+            return False
+        return True
+
+    def __nonzero__(self):
+        return type(self).__bool__(self)
+
+    def _clone(self, klass=None, **kwargs):
+        if klass is None:
+            klass = self.__class__
+        filter_obj = self.filter_obj.clone()
+        c = klass(warrior=self.warrior, filter_obj=filter_obj)
+        c.__dict__.update(kwargs)
+        return c
+
+    def _execute(self):
+        """
+        Fetch the tasks which match the current filters.
+        """
+        return self.warrior.filter_tasks(self.filter_obj)
+
+    def all(self):
+        """
+        Returns a new TaskQuerySet that is a copy of the current one.
+        """
+        return self._clone()
+
+    def pending(self):
+        return self.filter(status=PENDING)
+
+    def completed(self):
+        return self.filter(status=COMPLETED)
+
+    def filter(self, *args, **kwargs):
+        """
+        Returns a new TaskQuerySet with the given filters added.
+        """
+        clone = self._clone()
+        for f in args:
+            clone.filter_obj.add_filter(f)
+        for key, value in kwargs.items():
+            clone.filter_obj.add_filter_param(key, value)
+        return clone
+
+    def get(self, **kwargs):
+        """
+        Performs the query and returns a single object matching the given
+        keyword arguments.
+        """
+        clone = self.filter(**kwargs)
+        num = len(clone)
+        if num == 1:
+            return clone._result_cache[0]
+        if not num:
+            raise Task.DoesNotExist(
+                'Task matching query does not exist. '
+                'Lookup parameters were {0}'.format(kwargs))
+        raise ValueError(
+            'get() returned more than one Task -- it returned {0}! '
+            'Lookup parameters were {1}'.format(num, kwargs))
+
+
+class TaskWarrior(object):
     def __init__(self, data_location='~/.task', create=True):
-        if not os.path.exists(data_location):
+        data_location = os.path.expanduser(data_location)
+        if create and not os.path.exists(data_location):
             os.makedirs(data_location)
         self.config = {
             'data.location': os.path.expanduser(data_location),
         }
+        self.tasks = TaskQuerySet(self)
 
 
-    def _generate_command(self, command):
-        args = ['task', 'rc:/']
-        for item in self.config.items():
-            args.append('rc.{0}={1}'.format(*item))
-        args.append(command)
-        return ' '.join(args)
-
-    def _execute(self, command):
-        p = subprocess.Popen(self._generate_command(command), shell=True,
-                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-        stdout, stderr = p.communicate()
+    def _get_command_args(self, args, config_override={}):
+        command_args = ['task', 'rc:/']
+        config = self.config.copy()
+        config.update(config_override)
+        for item in config.items():
+            command_args.append('rc.{0}={1}'.format(*item))
+        command_args.extend(map(str, args))
+        return command_args
+
+    def execute_command(self, args, config_override={}):
+        command_args = self._get_command_args(
+            args, config_override=config_override)
+        logger.debug(' '.join(command_args))
+        p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
         if p.returncode:
-            raise TaskWarriorException(stderr.strip())
+            if stderr.strip():
+                error_msg = stderr.strip().splitlines()[-1]
+            else:
+                error_msg = stdout.strip()
+            raise TaskWarriorException(error_msg)
         return stdout.strip().split('\n')
 
-    def _format_filter_kwarg(self, kwarg):
-        key, val = kwarg[0], kwarg[1]
-        if key in ['tag', 'tags']:
-            key = 'tags.equal'
-        key = key.replace('__', '.')
-        return '{0}:{1}'.format(key, val)
-
-    def get_tasks(self, **filter_kwargs):
-        filters = self.DEFAULT_FILTERS
-        filters.update(filter_kwargs)
-        filter_commands = ' '.join(map(self._format_filter_kwarg,
-                                       filters.items()))
-        command = '{0} export'.format(filter_commands)
+    def filter_tasks(self, filter_obj):
+        args = ['export', '--'] + filter_obj.get_filter_params()
         tasks = []
-        for line in self._execute(command):
+        for line in self.execute_command(args):
             if line:
-                tasks.append(Task(self, json.loads(line.strip(','))))
+                data = line.strip(',')
+                try:
+                    tasks.append(Task(self, json.loads(data)))
+                except ValueError:
+                    raise TaskWarriorException('Invalid JSON: %s' % data)
         return tasks
 
-    def get_task(self, task_id):
-        command = '{0} export'.format(task_id)
-        return Task(self, json.loads(self._execute(command)[0]))
-
-    def add_task(self, description, project=None):
-        args = ['add', description]
-        if project is not None:
-            args.append('project:{0}'.format(project))
-        self._execute(' '.join(args))
-
-    def delete_task(self, task_id):
-        self._execute('{0} rc.confirmation:no delete'.format(task_id))
-
-    def complete_task(self, task_id):
-        self._execute('{0} done'.format(task_id))
+    def merge_with(self, path, push=False):
+        path = path.rstrip('/') + '/'
+        self.execute_command(['merge', path], config_override={
+            'merge.autopush': 'yes' if push else 'no',
+        })
 
 
-    def import_tasks(self, tasks):
-        fd, path = tempfile.mkstemp()
-        with open(path, 'w') as f:
-            f.write(json.dumps(tasks))
-        self._execute('import {0}'.format(path))
+    def undo(self):
+        self.execute_command(['undo'], config_override={
+            'confirmation': 'no',
+        })
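
A rough usage sketch of the interface this change introduces (not part of the commit): the data location, task description, and filter values below are made up for illustration, but the calls (TaskWarrior, tw.tasks.pending()/filter()/get(), Task.save(), add_annotation(), done()) correspond to the methods added in the diff above.

# Hypothetical usage sketch; paths, descriptions and filter values are illustrative.
from __future__ import print_function
from tasklib.task import Task, TaskWarrior

tw = TaskWarrior(data_location='~/.task', create=True)

# Creating a task: fields set via __setitem__ are tracked in _modified_fields
# and passed to `task add`, since the task has no 'id' yet.
task = Task(tw)
task['description'] = 'Review patch queue'
task.save()

# Lazy, chainable filtering; the export command only runs when the set is iterated.
for t in tw.tasks.pending().filter('project:Inbox', priority='H'):
    print(t['description'], t['urgency'])

# get() expects exactly one match, otherwise it raises Task.DoesNotExist or ValueError.
try:
    t = tw.tasks.get(description='Review patch queue')
    t.add_annotation('started review')
    t.done()
except Task.DoesNotExist:
    pass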