Merge branch 'develop'
[etc/taskwarrior.git] / tasklib / task.py
index 17af8d4977eaaea69f99e2b7608b53b2851e0cbc..20bff1fef9026cc0db9e183c32cd757b0fe6f2f1 100644 (file)
@@ -1,31 +1,23 @@
 from __future__ import print_function
 import copy
-import datetime
+import importlib
 import json
 import logging
 import os
-import pytz
 import six
 import sys
-import subprocess
-import tzlocal
+
+from .serializing import SerializingObject
 
 DATE_FORMAT = '%Y%m%dT%H%M%SZ'
 REPR_OUTPUT_SIZE = 10
 PENDING = 'pending'
 COMPLETED = 'completed'
-
-VERSION_2_1_0 = six.u('2.1.0')
-VERSION_2_2_0 = six.u('2.2.0')
-VERSION_2_3_0 = six.u('2.3.0')
-VERSION_2_4_0 = six.u('2.4.0')
+DELETED = 'deleted'
+WAITING = 'waiting'
+RECURRING = 'recurring'
 
 logger = logging.getLogger(__name__)
-local_zone = tzlocal.get_localzone()
-
-
-class TaskWarriorException(Exception):
-    pass
 
 
 class ReadOnlyDictView(object):
@@ -49,12 +41,14 @@ class ReadOnlyDictView(object):
     def __len__(self):
         return len(self.viewed_dict)
 
+    def __unicode__(self):
+        return six.u('ReadOnlyDictView: {0}'.format(repr(self.viewed_dict)))
+
+    __repr__ = __unicode__
+
     def get(self, key, default=None):
         return copy.deepcopy(self.viewed_dict.get(key, default))
 
-    def has_key(self, key):
-        return self.viewed_dict.has_key(key)
-
     def items(self):
         return [copy.deepcopy(v) for v in self.viewed_dict.items()]
 
@@ -62,182 +56,6 @@ class ReadOnlyDictView(object):
         return [copy.deepcopy(v) for v in self.viewed_dict.values()]
 
 
-class SerializingObject(object):
-    """
-    Common ancestor for TaskResource & TaskFilter, since they both
-    need to serialize arguments.
-
-    Serializing method should hold the following contract:
-      - any empty value (meaning removal of the attribute)
-        is deserialized into a empty string
-      - None denotes a empty value for any attribute
-
-    Deserializing method should hold the following contract:
-      - None denotes a empty value for any attribute (however,
-        this is here as a safeguard, TaskWarrior currently does
-        not export empty-valued attributes) if the attribute
-        is not iterable (e.g. list or set), in which case
-        a empty iterable should be used.
-    """
-
-    def _deserialize(self, key, value):
-        hydrate_func = getattr(self, 'deserialize_{0}'.format(key),
-                               lambda x: x if x != '' else None)
-        return hydrate_func(value)
-
-    def _serialize(self, key, value):
-        dehydrate_func = getattr(self, 'serialize_{0}'.format(key),
-                                 lambda x: x if x is not None else '')
-        return dehydrate_func(value)
-
-    def _normalize(self, key, value):
-        """
-        Use normalize_<key> methods to normalize user input. Any user
-        input will be normalized at the moment it is used as filter,
-        or entered as a value of Task attribute.
-        """
-
-        normalize_func = getattr(self, 'normalize_{0}'.format(key),
-                                 lambda x: x)
-
-        return normalize_func(value)
-
-    def timestamp_serializer(self, date):
-        if not date:
-            return ''
-
-        # Any serialized timestamp should be localized, we need to
-        # convert to UTC before converting to string (DATE_FORMAT uses UTC)
-        date = date.astimezone(pytz.utc)
-
-        return date.strftime(DATE_FORMAT)
-
-    def timestamp_deserializer(self, date_str):
-        if not date_str:
-            return None
-
-        # Return timestamp localized in the local zone
-        naive_timestamp = datetime.datetime.strptime(date_str, DATE_FORMAT)
-        localized_timestamp = pytz.utc.localize(naive_timestamp)
-        return localized_timestamp.astimezone(local_zone)
-
-    def serialize_entry(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_entry(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_entry(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_modified(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_modified(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_modified(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_due(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_due(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_due(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_scheduled(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_scheduled(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_scheduled(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_until(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_until(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_until(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_wait(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_wait(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_wait(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_annotations(self, value):
-        value = value if value is not None else []
-
-        # This may seem weird, but it's correct, we want to export
-        # a list of dicts as serialized value
-        serialized_annotations = [json.loads(annotation.export_data())
-                                  for annotation in value]
-        return serialized_annotations if serialized_annotations else ''
-
-    def deserialize_annotations(self, data):
-        return [TaskAnnotation(self, d) for d in data] if data else []
-
-    def serialize_tags(self, tags):
-        return ','.join(tags) if tags else ''
-
-    def deserialize_tags(self, tags):
-        if isinstance(tags, six.string_types):
-            return tags.split(',') if tags else []
-        return tags or []
-
-    def serialize_depends(self, value):
-        # Return the list of uuids
-        value = value if value is not None else set()
-        return ','.join(task['uuid'] for task in value)
-
-    def deserialize_depends(self, raw_uuids):
-        raw_uuids = raw_uuids or ''  # Convert None to empty string
-        uuids = raw_uuids.split(',')
-        return set(self.warrior.tasks.get(uuid=uuid) for uuid in uuids if uuid)
-
-    def datetime_normalizer(self, value):
-        """
-        Normalizes date/datetime value (considered to come from user input)
-        to localized datetime value. Following conversions happen:
-
-        naive date -> localized datetime with the same date, and time=midnight
-        naive datetime -> localized datetime with the same value
-        localized datetime -> localized datetime (no conversion)
-        """
-
-        if (isinstance(value, datetime.date)
-            and not isinstance(value, datetime.datetime)):
-            # Convert to local midnight
-            value_full = datetime.datetime.combine(value, datetime.time.min)
-            localized = local_zone.localize(value_full)
-        elif isinstance(value, datetime.datetime) and value.tzinfo is None:
-            # Convert to localized datetime object
-            localized = local_zone.localize(value)
-        else:
-            # If the value is already localized, there is no need to change
-            # time zone at this point. Also None is a valid value too.
-            localized = value
-        
-        return localized
-
-    def normalize_uuid(self, value):
-        # Enforce sane UUID
-        if not isinstance(value, six.text_type) or value == '':
-            raise ValueError("UUID must be a valid non-empty string.")
-
-        return value
-
-
 class TaskResource(SerializingObject):
     read_only_fields = []
 
@@ -248,7 +66,7 @@ class TaskResource(SerializingObject):
         # are not propagated.
         self._original_data = copy.deepcopy(self._data)
 
-    def _update_data(self, data, update_original=False):
+    def _update_data(self, data, update_original=False, remove_missing=False):
         """
         Low level update of the internal _data dict. Data which are coming as
         updates should already be serialized. If update_original is True, the
         """
         Low level update of the internal _data dict. Data which are coming as
         updates should already be serialized. If update_original is True, the
@@ -257,10 +75,14 @@ class TaskResource(SerializingObject):
         self._data.update(dict((key, self._deserialize(key, value))
                                for key, value in data.items()))
 
+        # In certain situations, we want to treat missing keys as removals
+        if remove_missing:
+            for key in set(self._data.keys()) - set(data.keys()):
+                self._data[key] = None
+
         if update_original:
             self._original_data = copy.deepcopy(self._data)
 
-
     def __getitem__(self, key):
         # This is a workaround to make TaskResource non-iterable
         # over simple index-based iteration
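Note: a minimal sketch of the remove_missing semantics introduced in this hunk, using hypothetical field values rather than anything from this repository. Keys already present in _data but absent from the incoming update are treated as removals:

    # Sketch only -- hypothetical values, plain string fields to keep it simple
    task._data = {'description': 'old text', 'project': 'Home'}
    task._update_data({'description': 'new text'}, remove_missing=True)
    # 'project' was missing from the update, so it is treated as removed:
    assert task._data == {'description': 'new text', 'project': None}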
@@ -305,7 +127,7 @@ class TaskResource(SerializingObject):
         # to pass that to TaskWarrior.
         data_tuples = filter(lambda t: t[1] is not '', data_tuples)
         data = dict(data_tuples)
-        return json.dumps(data, separators=(',',':'))
+        return json.dumps(data, separators=(',', ':'))
 
     @property
     def _modified_fields(self):
@@ -330,9 +152,10 @@ class TaskResource(SerializingObject):
 class TaskAnnotation(TaskResource):
     read_only_fields = ['entry', 'description']
 
-    def __init__(self, task, data={}):
+    def __init__(self, task, data=None):
         self.task = task
-        self._load_data(data)
+        self._load_data(data or dict())
+        super(TaskAnnotation, self).__init__(task.backend)
 
     def remove(self):
         self.task.remove_annotation(self)
@@ -345,6 +168,9 @@ class TaskAnnotation(TaskResource):
         # their data dics are the same
         return self.task == other.task and self._data == other._data
 
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
     __repr__ = __unicode__
 
 
@@ -366,6 +192,18 @@ class Task(TaskResource):
         """
         pass
 
         """
         pass
 
+    class ActiveTask(Exception):
+        """
+        Raised when the operation cannot be performed on the active task.
+        """
+        pass
+
+    class InactiveTask(Exception):
+        """
+        Raised when the operation cannot be performed on an inactive task.
+        """
+        pass
+
     class NotSaved(Exception):
         """
         Raised when the operation cannot be performed on the task, because
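Note: the new ActiveTask and InactiveTask exceptions pair with the start()/stop() methods added further down in this diff; a hedged usage sketch, assuming a saved Task instance named task:

    # Sketch only: start() raises Task.ActiveTask when the task is already active
    try:
        task.start()
    except Task.ActiveTask:
        pass  # already started, nothing to do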
@@ -374,7 +212,7 @@ class Task(TaskResource):
         pass
 
     @classmethod
-    def from_input(cls, input_file=sys.stdin, modify=None):
+    def from_input(cls, input_file=sys.stdin, modify=None, backend=None):
         """
         Creates a Task object, directly from the stdin, by reading one line.
         If modify=True, two lines are used, first line interpreted as the
         """
         Creates a Task object, directly from the stdin, by reading one line.
         If modify=True, two lines are used, first line interpreted as the
@@ -390,25 +228,32 @@ class Task(TaskResource):
         but defaults to sys.stdin.
         """
 
-        # TaskWarrior instance is set to None
-        task = cls(None)
-
         # Detect the hook type if not given directly
         name = os.path.basename(sys.argv[0])
         modify = name.startswith('on-modify') if modify is None else modify
 
+        # Create the TaskWarrior instance if none passed
+        if backend is None:
+            backends = importlib.import_module('tasklib.backends')
+            hook_parent_dir = os.path.dirname(os.path.dirname(sys.argv[0]))
+            backend = backends.TaskWarrior(data_location=hook_parent_dir)
+
+        # TaskWarrior instance is set to None
+        task = cls(backend)
+
         # Load the data from the input
         task._load_data(json.loads(input_file.readline().strip()))
 
         # If this is a on-modify event, we are provided with additional
         # line of input, which provides updated data
         if modify:
-            task._update_data(json.loads(input_file.readline().strip()))
+            task._update_data(json.loads(input_file.readline().strip()),
+                              remove_missing=True)
 
         return task
 
-    def __init__(self, warrior, **kwargs):
-        self.warrior = warrior
+    def __init__(self, backend, **kwargs):
+        super(Task, self).__init__(backend)
 
         # Check that user is not able to set read-only value in __init__
         for key in kwargs.keys():
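Note: for context, a minimal on-modify hook sketch built on the updated from_input() signature. The backend argument is optional; when omitted, a TaskWarrior backend is constructed from the hook's parent directory as shown above:

    #!/usr/bin/env python
    # Sketch of a Taskwarrior on-modify hook using Task.from_input
    from tasklib.task import Task

    task = Task.from_input()    # reads one (on-add) or two (on-modify) JSON lines from stdin
    print(task.export_data())   # hooks echo the resulting task back as JSON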
@@ -438,6 +283,8 @@ class Task(TaskResource):
             # If the tasks are not saved, compare the actual instances
             return id(self) == id(other)
 
+    def __ne__(self, other):
+        return not self.__eq__(other)
 
     def __hash__(self):
         if self['uuid']:
@@ -463,6 +310,14 @@ class Task(TaskResource):
     def pending(self):
         return self['status'] == six.text_type('pending')
 
+    @property
+    def recurring(self):
+        return self['status'] == six.text_type('recurring')
+
+    @property
+    def active(self):
+        return self['start'] is not None
+
     @property
     def saved(self):
         return self['uuid'] is not None or self['id'] is not None
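Note: a short sketch of the two new convenience properties, assuming an existing Task instance named task:

    # Sketch only: the status helpers added above
    if task.active:
        print('started at', task['start'])   # active == task has a 'start' timestamp
    if task.recurring:
        print('recurring parent task')       # status is 'recurring'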
@@ -471,229 +326,142 @@ class Task(TaskResource):
         # Check that all the tasks are saved
         for task in (cur_dependencies or set()):
             if not task.saved:
-                raise Task.NotSaved('Task \'%s\' needs to be saved before '
-                                    'it can be set as dependency.' % task)
+                raise Task.NotSaved(
+                    'Task \'%s\' needs to be saved before '
+                    'it can be set as dependency.' % task,
+                )
 
         return super(Task, self).serialize_depends(cur_dependencies)
 
-    def format_depends(self):
-        # We need to generate added and removed dependencies list,
-        # since Taskwarrior does not accept redefining dependencies.
-
-        # This cannot be part of serialize_depends, since we need
-        # to keep a list of all depedencies in the _data dictionary,
-        # not just currently added/removed ones
+    def delete(self):
+        if not self.saved:
+            raise Task.NotSaved(
+                'Task needs to be saved before it can be deleted',
+            )
 
 
-        old_dependencies = self._original_data.get('depends', set())
+        # Refresh the status, and raise exception if the task is deleted
+        self.refresh(only_fields=['status'])
 
 
-        added = self['depends'] - old_dependencies
-        removed = old_dependencies - self['depends']
+        if self.deleted:
+            raise Task.DeletedTask('Task was already deleted')
 
 
-        # Removed dependencies need to be prefixed with '-'
-        return 'depends:' + ','.join(
-                [t['uuid'] for t in added] +
-                ['-' + t['uuid'] for t in removed]
-            )
+        self.backend.delete_task(self)
 
 
-    def format_description(self):
-        # Task version older than 2.4.0 ignores first word of the
-        # task description if description: prefix is used
-        if self.warrior.version < VERSION_2_4_0:
-            return self._data['description']
-        else:
-            return "description:'{0}'".format(self._data['description'] or '')
+        # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status', 'start', 'end'])
 
 
-    def delete(self):
+    def start(self):
         if not self.saved:
-            raise Task.NotSaved("Task needs to be saved before it can be deleted")
+            raise Task.NotSaved(
+                'Task needs to be saved before it can be started',
+            )
 
 
-        # Refresh the status, and raise exception if the task is deleted
+        # Refresh, and raise exception if task is already completed/deleted
         self.refresh(only_fields=['status'])
 
-        if self.deleted:
-            raise Task.DeletedTask("Task was already deleted")
+        if self.completed:
+            raise Task.CompletedTask('Cannot start a completed task')
+        elif self.deleted:
+            raise Task.DeletedTask('Deleted task cannot be started')
+        elif self.active:
+            raise Task.ActiveTask('Task is already active')
 
 
-        self.warrior.execute_command([self['uuid'], 'delete'])
+        self.backend.start_task(self)
 
         # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status', 'start'])
+
+    def stop(self):
+        if not self.saved:
+            raise Task.NotSaved(
+                'Task needs to be saved before it can be stopped',
+            )
+
+        # Refresh, and raise exception if task is already completed/deleted
         self.refresh(only_fields=['status'])
 
+        if not self.active:
+            raise Task.InactiveTask('Cannot stop an inactive task')
+
+        self.backend.stop_task(self)
+
+        # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status', 'start'])
 
     def done(self):
         if not self.saved:
-            raise Task.NotSaved("Task needs to be saved before it can be completed")
+            raise Task.NotSaved(
+                'Task needs to be saved before it can be completed',
+            )
 
         # Refresh, and raise exception if task is already completed/deleted
         self.refresh(only_fields=['status'])
 
         if self.completed:
 
         # Refresh, and raise exception if task is already completed/deleted
         self.refresh(only_fields=['status'])
 
         if self.completed:
-            raise Task.CompletedTask("Cannot complete a completed task")
+            raise Task.CompletedTask('Cannot complete a completed task')
         elif self.deleted:
-            raise Task.DeletedTask("Deleted task cannot be completed")
+            raise Task.DeletedTask('Deleted task cannot be completed')
 
 
-        self.warrior.execute_command([self['uuid'], 'done'])
+        self.backend.complete_task(self)
 
         # Refresh the status again, so that we have updated info stored
-        self.refresh(only_fields=['status'])
+        self.refresh(only_fields=['status', 'start', 'end'])
 
     def save(self):
         if self.saved and not self.modified:
             return
 
-        args = [self['uuid'], 'modify'] if self.saved else ['add']
-        args.extend(self._get_modified_fields_as_args())
-        output = self.warrior.execute_command(args)
-
-        # Parse out the new ID, if the task is being added for the first time
-        if not self.saved:
-            id_lines = [l for l in output if l.startswith('Created task ')]
-
-            # Complain loudly if it seems that more tasks were created
-            # Should not happen
-            if len(id_lines) != 1 or len(id_lines[0].split(' ')) != 3:
-                raise TaskWarriorException("Unexpected output when creating "
-                                           "task: %s" % '\n'.join(id_lines))
-
-            # Circumvent the ID storage, since ID is considered read-only
-            self._data['id'] = int(id_lines[0].split(' ')[2].rstrip('.'))
-
-        # Refreshing is very important here, as not only modification time
-        # is updated, but arbitrary attribute may have changed due hooks
-        # altering the data before saving
-        self.refresh()
+        # All the actual work is done by the backend
+        self.backend.save_task(self)
 
     def add_annotation(self, annotation):
         if not self.saved:
-            raise Task.NotSaved("Task needs to be saved to add annotation")
+            raise Task.NotSaved('Task needs to be saved to add annotation')
 
 
-        args = [self['uuid'], 'annotate', annotation]
-        self.warrior.execute_command(args)
+        self.backend.annotate_task(self, annotation)
         self.refresh(only_fields=['annotations'])
 
     def remove_annotation(self, annotation):
         if not self.saved:
-            raise Task.NotSaved("Task needs to be saved to remove annotation")
+            raise Task.NotSaved('Task needs to be saved to remove annotation')
 
         if isinstance(annotation, TaskAnnotation):
             annotation = annotation['description']
-        args = [self['uuid'], 'denotate', annotation]
-        self.warrior.execute_command(args)
-        self.refresh(only_fields=['annotations'])
-
-    def _get_modified_fields_as_args(self):
-        args = []
-
-        def add_field(field):
-            # Add the output of format_field method to args list (defaults to
-            # field:value)
-            serialized_value = self._serialize(field, self._data[field])
-
-            # Empty values should not be enclosed in quotation marks, see
-            # TW-1510
-            if serialized_value is '':
-                escaped_serialized_value = ''
-            else:
-                escaped_serialized_value = "'{0}'".format(serialized_value)
-
-            format_default = lambda: "{0}:{1}".format(field,
-                                                      escaped_serialized_value)
-
-            format_func = getattr(self, 'format_{0}'.format(field),
-                                  format_default)
-
-            args.append(format_func())
-
-        # If we're modifying saved task, simply pass on all modified fields
-        if self.saved:
-            for field in self._modified_fields:
-                add_field(field)
-        # For new tasks, pass all fields that make sense
-        else:
-            for field in self._data.keys():
-                if field in self.read_only_fields:
-                    continue
-                add_field(field)
 
 
-        return args
+        self.backend.denotate_task(self, annotation)
+        self.refresh(only_fields=['annotations'])
 
 
-    def refresh(self, only_fields=[]):
+    def refresh(self, only_fields=None, after_save=False):
         # Raise error when trying to refresh a task that has not been saved
         if not self.saved:
-            raise Task.NotSaved("Task needs to be saved to be refreshed")
+            raise Task.NotSaved('Task needs to be saved to be refreshed')
+
+        new_data = self.backend.refresh_task(self, after_save=after_save)
 
 
-        # We need to use ID as backup for uuid here for the refreshes
-        # of newly saved tasks. Any other place in the code is fine
-        # with using UUID only.
-        args = [self['uuid'] or self['id'], 'export']
-        new_data = json.loads(self.warrior.execute_command(args)[0])
         if only_fields:
             to_update = dict(
-                [(k, new_data.get(k)) for k in only_fields])
+                [(k, new_data.get(k)) for k in only_fields],
+            )
             self._update_data(to_update, update_original=True)
         else:
             self._load_data(new_data)
 
-class TaskFilter(SerializingObject):
-    """
-    A set of parameters to filter the task list with.
-    """
-
-    def __init__(self, filter_params=[]):
-        self.filter_params = filter_params
-
-    def add_filter(self, filter_str):
-        self.filter_params.append(filter_str)
-
-    def add_filter_param(self, key, value):
-        key = key.replace('__', '.')
-
-        # Replace the value with empty string, since that is the
-        # convention in TW for empty values
-        attribute_key = key.split('.')[0]
-
-        # Since this is user input, we need to normalize before we serialize
-        value = self._normalize(key, value)
-        value = self._serialize(attribute_key, value)
-
-        # If we are filtering by uuid:, do not use uuid keyword
-        # due to TW-1452 bug
-        if key == 'uuid':
-            self.filter_params.insert(0, value)
-        else:
-            # Surround value with aphostrophes unless it's a empty string
-            value = "'%s'" % value if value else ''
-
-            # We enforce equality match by using 'is' (or 'none') modifier
-            # Without using this syntax, filter fails due to TW-1479
-            modifier = '.is' if value else '.none'
-            key = key + modifier if '.' not in key else key
-
-            self.filter_params.append("{0}:{1}".format(key, value))
-
-    def get_filter_params(self):
-        return [f for f in self.filter_params if f]
-
-    def clone(self):
-        c = self.__class__()
-        c.filter_params = list(self.filter_params)
-        return c
-
 
 class TaskQuerySet(object):
     """
     Represents a lazy lookup for a task objects.
     """
 
 
 class TaskQuerySet(object):
     """
     Represents a lazy lookup for a task objects.
     """
 
-    def __init__(self, warrior=None, filter_obj=None):
-        self.warrior = warrior
+    def __init__(self, backend, filter_obj=None):
+        self.backend = backend
         self._result_cache = None
-        self.filter_obj = filter_obj or TaskFilter()
+        self.filter_obj = filter_obj or self.backend.filter_class(backend)
 
     def __deepcopy__(self, memo):
         """
         Deep copy of a QuerySet doesn't populate the cache
         """
 
     def __deepcopy__(self, memo):
         """
         Deep copy of a QuerySet doesn't populate the cache
         """
-        obj = self.__class__()
+        obj = self.__class__(backend=self.backend)
         for k, v in self.__dict__.items():
             if k in ('_iter', '_result_cache'):
                 obj.__dict__[k] = None
@@ -704,7 +472,7 @@ class TaskQuerySet(object):
     def __repr__(self):
         data = list(self[:REPR_OUTPUT_SIZE + 1])
         if len(data) > REPR_OUTPUT_SIZE:
-            data[-1] = "...(remaining elements truncated)..."
+            data[-1] = '...(remaining elements truncated)...'
         return repr(data)
 
     def __len__(self):
@@ -738,7 +506,7 @@ class TaskQuerySet(object):
         if klass is None:
             klass = self.__class__
         filter_obj = self.filter_obj.clone()
-        c = klass(warrior=self.warrior, filter_obj=filter_obj)
+        c = klass(backend=self.backend, filter_obj=filter_obj)
         c.__dict__.update(kwargs)
         return c
 
@@ -746,7 +514,7 @@ class TaskQuerySet(object):
         """
         Fetch the tasks which match the current filters.
         """
         """
         Fetch the tasks which match the current filters.
         """
-        return self.warrior.filter_tasks(self.filter_obj)
+        return self.backend.filter_tasks(self.filter_obj)
 
     def all(self):
         """
 
     def all(self):
         """
@@ -760,6 +528,15 @@ class TaskQuerySet(object):
     def completed(self):
         return self.filter(status=COMPLETED)
 
+    def deleted(self):
+        return self.filter(status=DELETED)
+
+    def waiting(self):
+        return self.filter(status=WAITING)
+
+    def recurring(self):
+        return self.filter(status=RECURRING)
+
     def filter(self, *args, **kwargs):
         """
         Returns a new TaskQuerySet with the given filters added.
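Note: usage sketch for the new status shortcuts, assuming tw is a TaskWarrior backend exposing the usual tasks queryset:

    # Sketch only: the TaskQuerySet shortcuts added above
    tw.tasks.deleted()     # same as tw.tasks.filter(status=DELETED)
    tw.tasks.waiting()     # same as tw.tasks.filter(status=WAITING)
    tw.tasks.recurring()   # same as tw.tasks.filter(status=RECURRING)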
@@ -783,85 +560,9 @@ class TaskQuerySet(object):
         if not num:
             raise Task.DoesNotExist(
                 'Task matching query does not exist. '
-                'Lookup parameters were {0}'.format(kwargs))
+                'Lookup parameters were {0}'.format(kwargs),
+            )
         raise ValueError(
             'get() returned more than one Task -- it returned {0}! '
-            'Lookup parameters were {1}'.format(num, kwargs))
-
-
-class TaskWarrior(object):
-    def __init__(self, data_location='~/.task', create=True):
-        data_location = os.path.expanduser(data_location)
-        if create and not os.path.exists(data_location):
-            os.makedirs(data_location)
-        self.config = {
-            'data.location': os.path.expanduser(data_location),
-            'confirmation': 'no',
-            'dependency.confirmation': 'no',  # See TW-1483 or taskrc man page
-            'recurrence.confirmation': 'no',  # Necessary for modifying R tasks
-        }
-        self.tasks = TaskQuerySet(self)
-        self.version = self._get_version()
-
-    def _get_command_args(self, args, config_override={}):
-        command_args = ['task', 'rc:/']
-        config = self.config.copy()
-        config.update(config_override)
-        for item in config.items():
-            command_args.append('rc.{0}={1}'.format(*item))
-        command_args.extend(map(str, args))
-        return command_args
-
-    def _get_version(self):
-        p = subprocess.Popen(
-                ['task', '--version'],
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE)
-        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
-        return stdout.strip('\n')
-
-    def execute_command(self, args, config_override={}, allow_failure=True):
-        command_args = self._get_command_args(
-            args, config_override=config_override)
-        logger.debug(' '.join(command_args))
-        p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
-                             stderr=subprocess.PIPE)
-        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
-        if p.returncode and allow_failure:
-            if stderr.strip():
-                error_msg = stderr.strip().splitlines()[-1]
-            else:
-                error_msg = stdout.strip()
-            raise TaskWarriorException(error_msg)
-        return stdout.strip().split('\n')
-
-    def enforce_recurrence(self):
-        # Run arbitrary report command which will trigger generation
-        # of recurrent tasks.
-        # TODO: Make a version dependant enforcement once
-        #       TW-1531 is handled
-        self.execute_command(['next'], allow_failure=False)
-
-    def filter_tasks(self, filter_obj):
-        self.enforce_recurrence()
-        args = ['export', '--'] + filter_obj.get_filter_params()
-        tasks = []
-        for line in self.execute_command(args):
-            if line:
-                data = line.strip(',')
-                try:
-                    filtered_task = Task(self)
-                    filtered_task._load_data(json.loads(data))
-                    tasks.append(filtered_task)
-                except ValueError:
-                    raise TaskWarriorException('Invalid JSON: %s' % data)
-        return tasks
-
-    def merge_with(self, path, push=False):
-        path = path.rstrip('/') + '/'
-        self.execute_command(['merge', path], config_override={
-            'merge.autopush': 'yes' if push else 'no',
-        })
-
-    def undo(self):
-        self.execute_command(['undo'])
+            'Lookup parameters were {1}'.format(num, kwargs),
+        )
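Note: taken together, this hunk moves all command execution behind a backend object (TaskWarrior itself now lives in tasklib.backends, as the importlib lookup earlier in the diff suggests). A hedged end-to-end sketch of the resulting API:

    # Sketch only: typical round trip with the refactored backend API
    from tasklib.backends import TaskWarrior
    from tasklib.task import Task

    tw = TaskWarrior(data_location='~/.task')
    task = Task(tw, description='Write the changelog')
    task.save()    # delegated to tw.save_task(task)
    task.start()   # raises Task.ActiveTask if already started
    task.stop()    # raises Task.InactiveTask if not started
    task.done()    # delegated to tw.complete_task(task)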