Merge branch 'develop'
[etc/taskwarrior.git] / tasklib / task.py
diff --git a/tasklib/task.py b/tasklib/task.py
index 09535881550207dff67ed436f1a438f15b7a5949..20bff1fef9026cc0db9e183c32cd757b0fe6f2f1 100644
--- a/tasklib/task.py
+++ b/tasklib/task.py
 from __future__ import print_function
 import copy
-import datetime
+import importlib
 import json
 import logging
 import os
 import six
-import subprocess
+import sys
+
+from .serializing import SerializingObject
 
 DATE_FORMAT = '%Y%m%dT%H%M%SZ'
 REPR_OUTPUT_SIZE = 10
 PENDING = 'pending'
 COMPLETED = 'completed'
+DELETED = 'deleted'
+WAITING = 'waiting'
+RECURRING = 'recurring'
 
 logger = logging.getLogger(__name__)
 
 
-class TaskWarriorException(Exception):
-    pass
+class ReadOnlyDictView(object):
+    """
+    Provides a simplified, read-only view of a dict object.
+    """
+
+    def __init__(self, viewed_dict):
+        self.viewed_dict = viewed_dict
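+        # Note: the accessors below hand out deep copies, so callers
+        # cannot mutate the viewed dict through this view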
+
+    def __getitem__(self, key):
+        return copy.deepcopy(self.viewed_dict.__getitem__(key))
+
+    def __contains__(self, k):
+        return self.viewed_dict.__contains__(k)
+
+    def __iter__(self):
+        for value in self.viewed_dict:
+            yield copy.deepcopy(value)
+
+    def __len__(self):
+        return len(self.viewed_dict)
+
+    def __unicode__(self):
+        return six.u('ReadOnlyDictView: {0}'.format(repr(self.viewed_dict)))
+
+    __repr__ = __unicode__
+
+    def get(self, key, default=None):
+        return copy.deepcopy(self.viewed_dict.get(key, default))
+
+    def items(self):
+        return [copy.deepcopy(v) for v in self.viewed_dict.items()]
 
 
+    def values(self):
+        return [copy.deepcopy(v) for v in self.viewed_dict.values()]
 
 
-class TaskResource(object):
+
+class TaskResource(SerializingObject):
     read_only_fields = []
 
     def _load_data(self, data):
-        self._data = data
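+        # Deserialize every field up front, so that _data always holds
+        # Python-native values rather than raw serialized strings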
+        self._data = dict((key, self._deserialize(key, value))
+                          for key, value in data.items())
+        # We need to use a copy for original data, so that changes
+        # are not propagated.
+        self._original_data = copy.deepcopy(self._data)
+
+    def _update_data(self, data, update_original=False, remove_missing=False):
+        """
+        Low level update of the internal _data dict. Data which are coming as
+        updates should already be serialized. If update_original is True, the
+        original_data dict is updated as well.
+        """
+        self._data.update(dict((key, self._deserialize(key, value))
+                               for key, value in data.items()))
+
+        # In certain situations, we want to treat missing keys as removals
+        if remove_missing:
+            for key in set(self._data.keys()) - set(data.keys()):
+                self._data[key] = None
+
+        if update_original:
+            self._original_data = copy.deepcopy(self._data)
 
     def __getitem__(self, key):
-        hydrate_func = getattr(self, 'deserialize_{0}'.format(key),
-                               lambda x: x)
-        return hydrate_func(self._data.get(key))
+        # This is a workaround to prevent TaskResource from being iterated
+        # over by simple index-based iteration
+        try:
+            int(key)
+            raise StopIteration
+        except ValueError:
+            pass
+
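+        # Accessing a field that is not set lazily initializes it to the
+        # deserialized form of an empty value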
+        if key not in self._data:
+            self._data[key] = self._deserialize(key, None)
+
+        return self._data.get(key)
 
     def __setitem__(self, key, value):
         if key in self.read_only_fields:
             raise RuntimeError('Field \'%s\' is read-only' % key)
-        dehydrate_func = getattr(self, 'serialize_{0}'.format(key),
-                                 lambda x: x)
-        self._data[key] = dehydrate_func(value)
-        self._modified_fields.add(key)
+
+        # Normalize the user input before saving it
+        value = self._normalize(key, value)
+        self._data[key] = value
 
     def __str__(self):
         s = six.text_type(self.__unicode__())
@@ -47,19 +114,48 @@ class TaskResource(object):
     def __repr__(self):
         return str(self)
 
+    def export_data(self):
+        """
+        Exports current data contained in the Task as JSON
+        """
+
+        # We need to remove spaces for TW-1504, use custom separators
+        data_tuples = ((key, self._serialize(key, value))
+                       for key, value in six.iteritems(self._data))
+
+        # An empty string denotes an empty serialized value; we do not want
+        # to pass that to TaskWarrior.
+        data_tuples = filter(lambda t: t[1] != '', data_tuples)
+        data = dict(data_tuples)
+        return json.dumps(data, separators=(',', ':'))
+
+    @property
+    def _modified_fields(self):
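+        # Yields the names of writable fields whose current value differs
+        # from the snapshot kept in _original_data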
+        writable_fields = set(self._data.keys()) - set(self.read_only_fields)
+        for key in writable_fields:
+            new_value = self._data.get(key)
+            old_value = self._original_data.get(key)
+
+            # Make sure not to mark data removal as modified field if the
+            # field originally had some empty value
+            if key in self._data and not new_value and not old_value:
+                continue
+
+            if new_value != old_value:
+                yield key
+
+    @property
+    def modified(self):
+        return bool(list(self._modified_fields))
+
 
 class TaskAnnotation(TaskResource):
     read_only_fields = ['entry', 'description']
 
-    def __init__(self, task, data={}):
+    def __init__(self, task, data=None):
         self.task = task
-        self._load_data(data)
-
-    def deserialize_entry(self, data):
-        return datetime.datetime.strptime(data, DATE_FORMAT) if data else None
-
-    def serialize_entry(self, date):
-        return date.strftime(DATE_FORMAT) if date else ''
+        self._load_data(data or dict())
+        super(TaskAnnotation, self).__init__(task.backend)
 
     def remove(self):
         self.task.remove_annotation(self)
@@ -67,11 +163,19 @@ class TaskAnnotation(TaskResource):
     def __unicode__(self):
         return self['description']
 
+    def __eq__(self, other):
+        # consider two annotations equal if they belong to the same task and
+        # their data dicts are the same
+        return self.task == other.task and self._data == other._data
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
     __repr__ = __unicode__
 
 
 class Task(TaskResource):
-    read_only_fields = ['id', 'entry', 'urgency', 'uuid']
+    read_only_fields = ['id', 'entry', 'urgency', 'uuid', 'modified']
 
     class DoesNotExist(Exception):
         pass
@@ -88,6 +192,18 @@ class Task(TaskResource):
         """
         pass
 
+    class ActiveTask(Exception):
+        """
+        Raised when the operation cannot be performed on the active task.
+        """
+        pass
+
+    class InactiveTask(Exception):
+        """
+        Raised when the operation cannot be performed on an inactive task.
+        """
+        pass
+
     class NotSaved(Exception):
         """
         Raised when the operation cannot be performed on the task, because
@@ -95,14 +211,89 @@ class Task(TaskResource):
         """
         pass
 
-    def __init__(self, warrior, data={}):
-        self.warrior = warrior
-        self._load_data(data)
-        self._modified_fields = set()
+    @classmethod
+    def from_input(cls, input_file=sys.stdin, modify=None, backend=None):
+        """
+        Creates a Task object directly from stdin, by reading one line.
+        If modify=True, two lines are used: the first line is interpreted
+        as the original state of the Task object, and the second line as
+        its new, modified value. This is consistent with TaskWarrior's
+        hook system.
+
+        Objects created by this method should not be saved, deleted
+        or refreshed, as that could create an infinite loop. Unless a
+        backend is passed in explicitly, a TaskWarrior backend is
+        created using the hook's location.
+
+        The input_file argument can be used to specify the input file,
+        but defaults to sys.stdin.
+        """
+
+        # Detect the hook type if not given directly
+        name = os.path.basename(sys.argv[0])
+        modify = name.startswith('on-modify') if modify is None else modify
+
+        # Create the TaskWarrior instance if none passed
+        if backend is None:
+            backends = importlib.import_module('tasklib.backends')
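+            # Hook scripts live in <data location>/hooks/, so the data
+            # directory is two levels above the hook's path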
+            hook_parent_dir = os.path.dirname(os.path.dirname(sys.argv[0]))
+            backend = backends.TaskWarrior(data_location=hook_parent_dir)
+
+        # Create the task using the backend selected above
+        task = cls(backend)
+
+        # Load the data from the input
+        task._load_data(json.loads(input_file.readline().strip()))
+
+        # If this is an on-modify event, we are provided with an additional
+        # line of input, which provides the updated data
+        if modify:
+            task._update_data(json.loads(input_file.readline().strip()),
+                              remove_missing=True)
+
+        return task
+
+    def __init__(self, backend, **kwargs):
+        super(Task, self).__init__(backend)
+
+        # Check that user is not able to set read-only value in __init__
+        for key in kwargs.keys():
+            if key in self.read_only_fields:
+                raise RuntimeError('Field \'%s\' is read-only' % key)
+
+        # We normalize the data in kwargs so that users of the library
+        # do not have to pass different data formats via __setitem__ and
+        # __init__ methods, which would be confusing
+
+        # Rather unfortunate syntax due to python2.6 compatibility
+        self._data = dict((key, self._normalize(key, value))
+                          for (key, value) in six.iteritems(kwargs))
+        self._original_data = copy.deepcopy(self._data)
+
+        # Provide read only access to the original data
+        self.original = ReadOnlyDictView(self._original_data)
 
     def __unicode__(self):
         return self['description']
 
+    def __eq__(self, other):
+        if self['uuid'] and other['uuid']:
+            # For saved Tasks, just define equality by equality of uuids
+            return self['uuid'] == other['uuid']
+        else:
+            # If the tasks are not saved, compare the actual instances
+            return id(self) == id(other)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __hash__(self):
+        if self['uuid']:
+            # For saved Tasks, just define equality by equality of uuids
+            return self['uuid'].__hash__()
+        else:
+            # If the tasks are not saved, return hash of instance id
+            return id(self).__hash__()
+
     @property
     def completed(self):
         return self['status'] == six.text_type('completed')
@@ -120,162 +311,140 @@ class Task(TaskResource):
         return self['status'] == six.text_type('pending')
 
     @property
-    def saved(self):
-        return self['uuid'] is not None or self['id'] is not None
+    def recurring(self):
+        return self['status'] == six.text_type('recurring')
 
 
-    def serialize_due(self, date):
-        return date.strftime(DATE_FORMAT)
-
-    def deserialize_due(self, date_str):
-        if not date_str:
-            return None
-        return datetime.datetime.strptime(date_str, DATE_FORMAT)
+    @property
+    def active(self):
+        return self['start'] is not None
 
 
-    def deserialize_annotations(self, data):
-        return [TaskAnnotation(self, d) for d in data] if data else []
+    @property
+    def saved(self):
+        return self['uuid'] is not None or self['id'] is not None
 
 
-    def deserialize_tags(self, tags):
-        if isinstance(tags, basestring):
-            return tags.split(',') if tags else []
-        return tags
+    def serialize_depends(self, cur_dependencies):
+        # Check that all the tasks are saved
+        for task in (cur_dependencies or set()):
+            if not task.saved:
+                raise Task.NotSaved(
+                    'Task \'%s\' needs to be saved before '
+                    'it can be set as dependency.' % task,
+                )
 
 
-    def serialize_tags(self, tags):
-        return ','.join(tags) if tags else ''
+        return super(Task, self).serialize_depends(cur_dependencies)
 
     def delete(self):
         if not self.saved:
-            raise self.NotSaved("Task needs to be saved before it can be deleted")
+            raise Task.NotSaved(
+                'Task needs to be saved before it can be deleted',
+            )
 
         # Refresh the status, and raise exception if the task is deleted
         self.refresh(only_fields=['status'])
 
         if self.deleted:
-            raise self.DeletedTask("Task was already deleted")
+            raise Task.DeletedTask('Task was already deleted')
 
 
-        self.warrior.execute_command([self['uuid'], 'delete'], config_override={
-            'confirmation': 'no',
-        })
+        self.backend.delete_task(self)
 
         # Refresh the status again, so that we have updated info stored
-        self.refresh(only_fields=['status'])
-
+        self.refresh(only_fields=['status', 'start', 'end'])
 
 
-    def done(self):
+    def start(self):
         if not self.saved:
-            raise self.NotSaved("Task needs to be saved before it can be completed")
+            raise Task.NotSaved(
+                'Task needs to be saved before it can be started',
+            )
 
         # Refresh, and raise exception if task is already completed/deleted
         self.refresh(only_fields=['status'])
 
         if self.completed:
-            raise self.CompletedTask("Cannot complete a completed task")
+            raise Task.CompletedTask('Cannot start a completed task')
         elif self.deleted:
-            raise self.DeletedTask("Deleted task cannot be completed")
+            raise Task.DeletedTask('Deleted task cannot be started')
+        elif self.active:
+            raise Task.ActiveTask('Task is already active')
 
 
-        self.warrior.execute_command([self['uuid'], 'done'])
+        self.backend.start_task(self)
 
         # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status', 'start'])
+
+    def stop(self):
+        if not self.saved:
+            raise Task.NotSaved(
+                'Task needs to be saved before it can be stopped',
+            )
+
+        # Refresh, and raise exception if task is already completed/deleted
         self.refresh(only_fields=['status'])
 
-    def save(self):
-        args = [self['uuid'], 'modify'] if self.saved else ['add']
-        args.extend(self._get_modified_fields_as_args())
-        output = self.warrior.execute_command(args)
+        if not self.active:
+            raise Task.InactiveTask('Cannot stop an inactive task')
 
 
-        # Parse out the new ID, if the task is being added for the first time
+        self.backend.stop_task(self)
+
+        # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status', 'start'])
+
+    def done(self):
         if not self.saved:
-            id_lines = [l for l in output if l.startswith('Created task ')]
+            raise Task.NotSaved(
+                'Task needs to be saved before it can be completed',
+            )
 
 
-            # Complain loudly if it seems that more tasks were created
-            # Should not happen
-            if len(id_lines) != 1 or len(id_lines[0].split(' ')) != 3:
-                raise TaskWarriorException("Unexpected output when creating "
-                                           "task: %s" % '\n'.join(id_lines))
+        # Refresh, and raise exception if task is already completed/deleted
+        self.refresh(only_fields=['status'])
 
 
-            # Circumvent the ID storage, since ID is considered read-only
-            self._data['id'] = int(id_lines[0].split(' ')[2].rstrip('.'))
+        if self.completed:
+            raise Task.CompletedTask('Cannot complete a completed task')
+        elif self.deleted:
+            raise Task.DeletedTask('Deleted task cannot be completed')
+
+        self.backend.complete_task(self)
+
+        # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status', 'start', 'end'])
 
 
-        self._modified_fields.clear()
-        self.refresh()
+    def save(self):
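+        # Nothing to do if the task already exists and no field was modified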
+        if self.saved and not self.modified:
+            return
+
+        # All the actual work is done by the backend
+        self.backend.save_task(self)
 
     def add_annotation(self, annotation):
         if not self.saved:
-            raise self.NotSaved("Task needs to be saved to add annotation")
+            raise Task.NotSaved('Task needs to be saved to add annotation')
 
 
-        args = [self['uuid'], 'annotate', annotation]
-        self.warrior.execute_command(args)
+        self.backend.annotate_task(self, annotation)
         self.refresh(only_fields=['annotations'])
 
     def remove_annotation(self, annotation):
         if not self.saved:
-            raise self.NotSaved("Task needs to be saved to add annotation")
+            raise Task.NotSaved('Task needs to be saved to remove annotation')
 
         if isinstance(annotation, TaskAnnotation):
             annotation = annotation['description']
-        args = [self['uuid'], 'denotate', annotation]
-        self.warrior.execute_command(args)
-        self.refresh(only_fields=['annotations'])
-
-    def _get_modified_fields_as_args(self):
-        args = []
 
 
-        # If we're modifying saved task, simply pass on all modified fields
-        if self.saved:
-            for field in self._modified_fields:
-                args.append('{0}:{1}'.format(field, self._data[field]))
-        # For new tasks, pass all fields that make sense
-        else:
-            for field in self._data.keys():
-                if field in self.read_only_fields:
-                    continue
-                args.append('{0}:{1}'.format(field, self._data[field]))
-
-        return args
+        self.backend.denotate_task(self, annotation)
+        self.refresh(only_fields=['annotations'])
 
 
-    def refresh(self, only_fields=[]):
+    def refresh(self, only_fields=None, after_save=False):
         # Raise error when trying to refresh a task that has not been saved
         if not self.saved:
-            raise self.NotSaved("Task needs to be saved to be refreshed")
+            raise Task.NotSaved('Task needs to be saved to be refreshed')
+
+        new_data = self.backend.refresh_task(self, after_save=after_save)
 
 
-        # We need to use ID as backup for uuid here for the refreshes
-        # of newly saved tasks. Any other place in the code is fine
-        # with using UUID only.
-        args = [self['uuid'] or self['id'], 'export']
-        new_data = json.loads(self.warrior.execute_command(args)[0])
         if only_fields:
             to_update = dict(
-                [(k, new_data.get(k)) for k in only_fields])
-            self._data.update(to_update)
+                [(k, new_data.get(k)) for k in only_fields],
+            )
+            self._update_data(to_update, update_original=True)
         else:
-            self._data = new_data
-
-
-class TaskFilter(object):
-    """
-    A set of parameters to filter the task list with.
-    """
-
-    def __init__(self, filter_params=[]):
-        self.filter_params = filter_params
-
-    def add_filter(self, filter_str):
-        self.filter_params.append(filter_str)
-
-    def add_filter_param(self, key, value):
-        key = key.replace('__', '.')
-
-        # Replace the value with empty string, since that is the
-        # convention in TW for empty values
-        value = value if value is not None else ''
-        self.filter_params.append('{0}:{1}'.format(key, value))
-
-    def get_filter_params(self):
-        return [f for f in self.filter_params if f]
-
-    def clone(self):
-        c = self.__class__()
-        c.filter_params = list(self.filter_params)
-        return c
+            self._load_data(new_data)
 
 
 class TaskQuerySet(object):
@@ -283,16 +452,16 @@ class TaskQuerySet(object):
     Represents a lazy lookup for task objects.
     """
 
-    def __init__(self, warrior=None, filter_obj=None):
-        self.warrior = warrior
+    def __init__(self, backend, filter_obj=None):
+        self.backend = backend
         self._result_cache = None
-        self.filter_obj = filter_obj or TaskFilter()
+        self.filter_obj = filter_obj or self.backend.filter_class(backend)
 
     def __deepcopy__(self, memo):
         """
         Deep copy of a QuerySet doesn't populate the cache
         """
-        obj = self.__class__()
+        obj = self.__class__(backend=self.backend)
         for k, v in self.__dict__.items():
             if k in ('_iter', '_result_cache'):
                 obj.__dict__[k] = None
@@ -303,7 +472,7 @@ class TaskQuerySet(object):
     def __repr__(self):
         data = list(self[:REPR_OUTPUT_SIZE + 1])
         if len(data) > REPR_OUTPUT_SIZE:
-            data[-1] = "...(remaining elements truncated)..."
+            data[-1] = '...(remaining elements truncated)...'
         return repr(data)
 
     def __len__(self):
@@ -337,7 +506,7 @@ class TaskQuerySet(object):
         if klass is None:
             klass = self.__class__
         filter_obj = self.filter_obj.clone()
-        c = klass(warrior=self.warrior, filter_obj=filter_obj)
+        c = klass(backend=self.backend, filter_obj=filter_obj)
         c.__dict__.update(kwargs)
         return c
 
@@ -345,7 +514,7 @@ class TaskQuerySet(object):
         """
         Fetch the tasks which match the current filters.
         """
-        return self.warrior.filter_tasks(self.filter_obj)
+        return self.backend.filter_tasks(self.filter_obj)
 
     def all(self):
         """
@@ -359,6 +528,15 @@ class TaskQuerySet(object):
     def completed(self):
         return self.filter(status=COMPLETED)
 
+    def deleted(self):
+        return self.filter(status=DELETED)
+
+    def waiting(self):
+        return self.filter(status=WAITING)
+
+    def recurring(self):
+        return self.filter(status=RECURRING)
+
     def filter(self, *args, **kwargs):
         """
         Returns a new TaskQuerySet with the given filters added.
@@ -382,65 +560,9 @@ class TaskQuerySet(object):
         if not num:
             raise Task.DoesNotExist(
                 'Task matching query does not exist. '
-                'Lookup parameters were {0}'.format(kwargs))
+                'Lookup parameters were {0}'.format(kwargs),
+            )
         raise ValueError(
             'get() returned more than one Task -- it returned {0}! '
-            'Lookup parameters were {1}'.format(num, kwargs))
-
-
-class TaskWarrior(object):
-    def __init__(self, data_location='~/.task', create=True):
-        data_location = os.path.expanduser(data_location)
-        if create and not os.path.exists(data_location):
-            os.makedirs(data_location)
-        self.config = {
-            'data.location': os.path.expanduser(data_location),
-        }
-        self.tasks = TaskQuerySet(self)
-
-    def _get_command_args(self, args, config_override={}):
-        command_args = ['task', 'rc:/']
-        config = self.config.copy()
-        config.update(config_override)
-        for item in config.items():
-            command_args.append('rc.{0}={1}'.format(*item))
-        command_args.extend(map(str, args))
-        return command_args
-
-    def execute_command(self, args, config_override={}):
-        command_args = self._get_command_args(
-            args, config_override=config_override)
-        logger.debug(' '.join(command_args))
-        p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
-                             stderr=subprocess.PIPE)
-        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
-        if p.returncode:
-            if stderr.strip():
-                error_msg = stderr.strip().splitlines()[-1]
-            else:
-                error_msg = stdout.strip()
-            raise TaskWarriorException(error_msg)
-        return stdout.strip().split('\n')
-
-    def filter_tasks(self, filter_obj):
-        args = ['export', '--'] + filter_obj.get_filter_params()
-        tasks = []
-        for line in self.execute_command(args):
-            if line:
-                data = line.strip(',')
-                try:
-                    tasks.append(Task(self, json.loads(data)))
-                except ValueError:
-                    raise TaskWarriorException('Invalid JSON: %s' % data)
-        return tasks
-
-    def merge_with(self, path, push=False):
-        path = path.rstrip('/') + '/'
-        self.execute_command(['merge', path], config_override={
-            'merge.autopush': 'yes' if push else 'no',
-        })
-
-    def undo(self):
-        self.execute_command(['undo'], config_override={
-            'confirmation': 'no',
-        })
+            'Lookup parameters were {1}'.format(num, kwargs),
+        )
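
Usage sketch (editor's illustration, not part of the commit): after this change, all
interaction with TaskWarrior goes through a backend object rather than the
TaskWarrior class removed from this module. Assuming the TaskWarrior backend in
tasklib.backends accepts a data_location argument like the removed class did, and
exposes a tasks queryset the same way, driving the refactored API could look roughly
like this:

    from tasklib.backends import TaskWarrior
    from tasklib.task import Task

    tw = TaskWarrior(data_location='~/.task')

    # Task.__init__ normalizes keyword arguments; save() delegates to
    # backend.save_task() and is a no-op for an unmodified saved task
    task = Task(tw, description='Write the release notes')
    task.save()

    # start()/stop()/done() validate status, call the backend, then refresh
    task.start()
    task.stop()
    task.done()

    # TaskQuerySet filters, including the new deleted()/waiting()/recurring()
    waiting = tw.tasks.waiting()
    recurring = tw.tasks.recurring()

For hook scripts, Task.from_input() reads one line from stdin (two lines for
on-modify hooks) and export_data() serializes the task back into the compact JSON
form that TaskWarrior expects a hook to print.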