from __future__ import print_function
import copy
-import datetime
import importlib
import json
import logging
import os
import six
import sys
def __len__(self):
return len(self.viewed_dict)
+ def __unicode__(self):
+ return six.u('ReadOnlyDictView: {0}'.format(repr(self.viewed_dict)))
+
+ __repr__ = __unicode__
+
def get(self, key, default=None):
return copy.deepcopy(self.viewed_dict.get(key, default))
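        # Keep a pristine snapshot of the data so that _modified_fields can
        # later compare self._data against self._original_data.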
if update_original:
self._original_data = copy.deepcopy(self._data)
-
def __getitem__(self, key):
# This is a workaround to make TaskResource non-iterable
# over simple index-based iteration
        # Empty string denotes an empty serialized value, we do not want
        # to pass that to TaskWarrior.
        data_tuples = filter(lambda t: t[1] != '', data_tuples)
data = dict(data_tuples)
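        # Compact separators keep the serialized JSON free of padding spaces
        # before it is handed over to TaskWarrior.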
- return json.dumps(data, separators=(',',':'))
+ return json.dumps(data, separators=(',', ':'))
@property
def _modified_fields(self):
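        """
        Yields the names of fields whose current value differs from
        self._original_data.
        """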
# Create the TaskWarrior instance if none passed
if backend is None:
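            # importlib needs an absolute module path here; a relative name
            # such as '.backends' only works with an explicit package argument.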
- backends = importlib.import_module('.backends')
+ backends = importlib.import_module('tasklib.backends')
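            # Hook scripts are installed under <data_location>/hooks/, so the
            # grandparent directory of sys.argv[0] is the data directory.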
hook_parent_dir = os.path.dirname(os.path.dirname(sys.argv[0]))
backend = backends.TaskWarrior(data_location=hook_parent_dir)
# If the tasks are not saved, compare the actual instances
return id(self) == id(other)
-
def __hash__(self):
if self['uuid']:
            # For saved Tasks, hash on the uuid, mirroring __eq__
            return self['uuid'].__hash__()
else:
self._load_data(new_data)
+
class TaskQuerySet(object):
"""
    Represents a lazy lookup for task objects.
"""
- def __init__(self, backend=None, filter_obj=None):
+ def __init__(self, backend, filter_obj=None):
self.backend = backend
self._result_cache = None
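        # When no filter is supplied, start from a fresh filter produced by
        # the backend's filter_class.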
self.filter_obj = filter_obj or self.backend.filter_class(backend)
"""
Deep copy of a QuerySet doesn't populate the cache
"""
- obj = self.__class__()
+ obj = self.__class__(backend=self.backend)
        for k, v in self.__dict__.items():
            if k in ('_iter', '_result_cache'):
                obj.__dict__[k] = None
            else:
                # `memo` comes from the __deepcopy__(self, memo) signature
                obj.__dict__[k] = copy.deepcopy(v, memo)
        return obj