from __future__ import print_function
import copy
-import datetime
import importlib
import json
import logging
import os
import six
import sys
REPR_OUTPUT_SIZE = 10
PENDING = 'pending'
COMPLETED = 'completed'
+DELETED = 'deleted'
+WAITING = 'waiting'
logger = logging.getLogger(__name__)
def __len__(self):
return len(self.viewed_dict)
+ def __unicode__(self):
+ return six.u('ReadOnlyDictView: {0}'.format(repr(self.viewed_dict)))
+
+ __repr__ = __unicode__
+
def get(self, key, default=None):
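# Return a deep copy so the caller cannot mutate the underlying dict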
return copy.deepcopy(self.viewed_dict.get(key, default))
if update_original:
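# Keep a pristine snapshot of the data to detect later modifications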
self._original_data = copy.deepcopy(self._data)
-
def __getitem__(self, key):
# This is a workaround to make TaskResource non-iterable
# over simple index-based iteration
# Empty string denotes an empty serialized value; we do not want
# to pass that to TaskWarrior.
data_tuples = filter(lambda t: t[1] != '', data_tuples)
data = dict(data_tuples)
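# Compact separators keep the exported JSON free of whitespace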
- return json.dumps(data, separators=(',',':'))
+ return json.dumps(data, separators=(',', ':'))
@property
def _modified_fields(self):
# their data dicts are the same
return self.task == other.task and self._data == other._data
+ def __ne__(self, other):
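+ # Python 2 does not derive __ne__ from __eq__, so define it explicitly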
+ return not self.__eq__(other)
+
__repr__ = __unicode__
# Create the TaskWarrior instance if none passed
if backend is None:
- backends = importlib.import_module('.backends')
+ backends = importlib.import_module('tasklib.backends')
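# Hook scripts live in the hooks subdirectory of the data dir,
# so the data location is two levels above the running script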
hook_parent_dir = os.path.dirname(os.path.dirname(sys.argv[0]))
backend = backends.TaskWarrior(data_location=hook_parent_dir)
# If the tasks are not saved, compare the actual instances
return id(self) == id(other)
+ def __ne__(self, other):
+ return not self.__eq__(other)
def __hash__(self):
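# Saved tasks carry a uuid, which provides a stable hash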
if self['uuid']:
def pending(self):
return self['status'] == six.text_type('pending')
+ @property
+ def recurring(self):
+ return self['status'] == six.text_type('recurring')
+
@property
def active(self):
return self['start'] is not None
else:
self._load_data(new_data)
+
class TaskQuerySet(object):
"""
Represents a lazy lookup of task objects.
"""
- def __init__(self, backend=None, filter_obj=None):
+ def __init__(self, backend, filter_obj=None):
self.backend = backend
self._result_cache = None
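# Use the given filter, or start with an empty filter bound to this backend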
self.filter_obj = filter_obj or self.backend.filter_class(backend)
"""
Deep copy of a QuerySet doesn't populate the cache
"""
- obj = self.__class__()
+ obj = self.__class__(backend=self.backend)
for k, v in self.__dict__.items():
if k in ('_iter', '_result_cache'):
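# Reset the iterator and result cache so the copy is evaluated lazily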
obj.__dict__[k] = None
def completed(self):
return self.filter(status=COMPLETED)
+ def deleted(self):
+ return self.filter(status=DELETED)
+
+ def waiting(self):
+ return self.filter(status=WAITING)
+
def filter(self, *args, **kwargs):
"""
Returns a new TaskQuerySet with the given filters added.
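
Example (hypothetical usage, assuming ``tw`` is a TaskWarrior backend instance):

    >>> home_tasks = tw.tasks.filter(status='pending', project='Home')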