from __future__ import print_function
import copy
import datetime
import json
import logging
import os
import six
import subprocess

DATE_FORMAT = '%Y%m%dT%H%M%SZ'
REPR_OUTPUT_SIZE = 10
PENDING = 'pending'
COMPLETED = 'completed'

logger = logging.getLogger(__name__)


class TaskWarriorException(Exception):
    pass

class TaskResource(object):
    read_only_fields = []

    def _load_data(self, data):
        self._data = data

    def __getitem__(self, key):
        # Fields with a deserialize_<field> hook are hydrated into Python objects
        hydrate_func = getattr(self, 'deserialize_{0}'.format(key),
                               lambda x: x)
        return hydrate_func(self._data.get(key))

    def __setitem__(self, key, value):
        if key in self.read_only_fields:
            raise RuntimeError('Field \'%s\' is read-only' % key)
        dehydrate_func = getattr(self, 'serialize_{0}'.format(key),
                                 lambda x: x)
        self._data[key] = dehydrate_func(value)
        self._modified_fields.add(key)

    def __str__(self):
        s = six.text_type(self.__unicode__())
        if not six.PY3:
            s = s.encode('utf-8')
        return s

class TaskAnnotation(TaskResource):
    read_only_fields = ['entry', 'description']

    def __init__(self, task, data=None):
        self.task = task
        # Use a fresh dict per annotation rather than a shared mutable default
        self._load_data(data or {})

    def deserialize_entry(self, data):
        return datetime.datetime.strptime(data, DATE_FORMAT) if data else None

    def serialize_entry(self, date):
        return date.strftime(DATE_FORMAT) if date else ''

    def remove(self):
        self.task.remove_annotation(self)

    def __unicode__(self):
        return self['description']

    __repr__ = __unicode__

class Task(TaskResource):
    read_only_fields = ['id', 'entry', 'urgency', 'uuid']

    class DoesNotExist(Exception):
        pass

    class CompletedTask(Exception):
        """
        Raised when the operation cannot be performed on a completed task.
        """
        pass

    class DeletedTask(Exception):
        """
        Raised when the operation cannot be performed on a deleted task.
        """
        pass

    def __init__(self, warrior, data=None):
        self.warrior = warrior
        # Use a fresh dict per task rather than a shared mutable default
        self._load_data(data or {})
        self._modified_fields = set()

    def __unicode__(self):
        return self['description']

    @property
    def completed(self):
        return self['status'] == six.text_type('completed')

    @property
    def deleted(self):
        return self['status'] == six.text_type('deleted')

    @property
    def waiting(self):
        return self['status'] == six.text_type('waiting')

    @property
    def pending(self):
        return self['status'] == six.text_type('pending')

    def serialize_due(self, date):
        return date.strftime(DATE_FORMAT)

    def deserialize_due(self, date_str):
        if not date_str:
            return None
        return datetime.datetime.strptime(date_str, DATE_FORMAT)

    def deserialize_annotations(self, data):
        return [TaskAnnotation(self, d) for d in data] if data else []

    def deserialize_tags(self, tags):
        # Tags may arrive either as a comma-separated string or as a list
        if isinstance(tags, six.string_types):
            return tags.split(',') if tags else []
        return tags

    def serialize_tags(self, tags):
        return ','.join(tags) if tags else ''

    def delete(self):
        # Refresh the status, and raise exception if the task is deleted
        self.refresh(only_fields=['status'])

        if self.deleted:
            raise self.DeletedTask("Task was already deleted")

        self.warrior.execute_command([self['uuid'], 'delete'], config_override={
            'confirmation': 'no',
        })

        # Refresh the status again, so that we have updated info stored
        self.refresh(only_fields=['status'])

    def done(self):
        # Refresh, and raise exception if task is already completed/deleted
        self.refresh(only_fields=['status'])

        if self.completed:
            raise self.CompletedTask("Cannot complete a completed task")
        elif self.deleted:
            raise self.DeletedTask("Deleted task cannot be completed")

        self.warrior.execute_command([self['uuid'], 'done'])

        # Refresh the status again, so that we have updated info stored
        self.refresh(only_fields=['status'])

    def save(self):
        # A task without a uuid has not been stored yet, so 'add' it;
        # otherwise 'modify' the existing one
        args = [self['uuid'], 'modify'] if self['uuid'] else ['add']
        args.extend(self._get_modified_fields_as_args())
        self.warrior.execute_command(args)
        self._modified_fields.clear()

    def add_annotation(self, annotation):
        args = [self['uuid'], 'annotate', annotation]
        self.warrior.execute_command(args)
        self.refresh(only_fields=['annotations'])

    def remove_annotation(self, annotation):
        if isinstance(annotation, TaskAnnotation):
            annotation = annotation['description']
        args = [self['uuid'], 'denotate', annotation]
        self.warrior.execute_command(args)
        self.refresh(only_fields=['annotations'])

    def _get_modified_fields_as_args(self):
        args = []
        for field in self._modified_fields:
            args.append('{0}:{1}'.format(field, self._data[field]))
        return args

    def refresh(self, only_fields=[]):
        args = [self['uuid'], 'export']
        new_data = json.loads(self.warrior.execute_command(args)[0])
        if only_fields:
            to_update = dict(
                [(k, new_data.get(k)) for k in only_fields])
            self._data.update(to_update)
        else:
            self._data = new_data

class TaskFilter(object):
    """
    A set of parameters to filter the task list with.
    """

    def __init__(self, filter_params=None):
        # Avoid a shared mutable default; each filter owns its own list
        self.filter_params = filter_params if filter_params is not None else []

    def add_filter(self, filter_str):
        self.filter_params.append(filter_str)

    def add_filter_param(self, key, value):
        key = key.replace('__', '.')

        # Replace the value with empty string, since that is the
        # convention in TW for empty values
        value = value if value is not None else ''
        self.filter_params.append('{0}:{1}'.format(key, value))

    def get_filter_params(self):
        return [f for f in self.filter_params if f]

    def clone(self):
        c = self.__class__()
        c.filter_params = list(self.filter_params)
        return c

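# Illustrative sketch, not part of the original module: double underscores in
# keyword filters map to dots, mirroring Django-style lookups. The names and
# values below are hypothetical.
#
#     f = TaskFilter()
#     f.add_filter_param('project__contains', 'home')
#     f.add_filter_param('status', 'pending')
#     f.get_filter_params()  # ['project.contains:home', 'status:pending']
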
class TaskQuerySet(object):
    """
    Represents a lazy lookup for task objects.
    """

    def __init__(self, warrior=None, filter_obj=None):
        self.warrior = warrior
        self._result_cache = None
        self.filter_obj = filter_obj or TaskFilter()

    def __deepcopy__(self, memo):
        """
        Deep copy of a QuerySet doesn't populate the cache.
        """
        obj = self.__class__()
        for k, v in self.__dict__.items():
            if k in ('_iter', '_result_cache'):
                obj.__dict__[k] = None
            else:
                obj.__dict__[k] = copy.deepcopy(v, memo)
        return obj

    def __repr__(self):
        data = list(self[:REPR_OUTPUT_SIZE + 1])
        if len(data) > REPR_OUTPUT_SIZE:
            data[-1] = "...(remaining elements truncated)..."
        return repr(data)

    def __len__(self):
        if self._result_cache is None:
            self._result_cache = list(self)
        return len(self._result_cache)

    def __iter__(self):
        if self._result_cache is None:
            self._result_cache = self._execute()
        return iter(self._result_cache)

    def __getitem__(self, k):
        if self._result_cache is None:
            self._result_cache = list(self)
        return self._result_cache.__getitem__(k)

    def __bool__(self):
        if self._result_cache is not None:
            return bool(self._result_cache)
        try:
            # Evaluating iter(self) runs the query and populates the cache
            next(iter(self))
        except StopIteration:
            return False
        return True

    def __nonzero__(self):
        return type(self).__bool__(self)

    def _clone(self, klass=None, **kwargs):
        if klass is None:
            klass = self.__class__
        filter_obj = self.filter_obj.clone()
        c = klass(warrior=self.warrior, filter_obj=filter_obj)
        c.__dict__.update(kwargs)
        return c

    def _execute(self):
        """
        Fetch the tasks which match the current filters.
        """
        return self.warrior.filter_tasks(self.filter_obj)

    def all(self):
        """
        Returns a new TaskQuerySet that is a copy of the current one.
        """
        return self._clone()

    def pending(self):
        return self.filter(status=PENDING)

    def completed(self):
        return self.filter(status=COMPLETED)

    def filter(self, *args, **kwargs):
        """
        Returns a new TaskQuerySet with the given filters added.
        """
        clone = self._clone()
        for f in args:
            clone.filter_obj.add_filter(f)
        for key, value in kwargs.items():
            clone.filter_obj.add_filter_param(key, value)
        return clone

    def get(self, **kwargs):
        """
        Performs the query and returns a single object matching the given
        keyword arguments.
        """
        clone = self.filter(**kwargs)
        num = len(clone)
        if num == 1:
            return clone._result_cache[0]
        if not num:
            raise Task.DoesNotExist(
                'Task matching query does not exist. '
                'Lookup parameters were {0}'.format(kwargs))
        raise ValueError(
            'get() returned more than one Task -- it returned {0}! '
            'Lookup parameters were {1}'.format(num, kwargs))

class TaskWarrior(object):
    def __init__(self, data_location='~/.task', create=True):
        data_location = os.path.expanduser(data_location)
        if create and not os.path.exists(data_location):
            os.makedirs(data_location)
        self.config = {
            'data.location': data_location,
        }
        self.tasks = TaskQuerySet(self)

    def _get_command_args(self, args, config_override={}):
        # 'rc:/' avoids reading the user's own taskrc; all configuration is
        # passed explicitly as rc.* overrides instead
        command_args = ['task', 'rc:/']
        config = self.config.copy()
        config.update(config_override)
        for item in config.items():
            command_args.append('rc.{0}={1}'.format(*item))
        command_args.extend(map(str, args))
        return command_args

    def execute_command(self, args, config_override={}):
        command_args = self._get_command_args(
            args, config_override=config_override)
        logger.debug(' '.join(command_args))
        p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
        if p.returncode:
            if stderr.strip():
                error_msg = stderr.strip().splitlines()[-1]
            else:
                error_msg = stdout.strip()
            raise TaskWarriorException(error_msg)
        return stdout.strip().split('\n')

    def filter_tasks(self, filter_obj):
        args = ['export', '--'] + filter_obj.get_filter_params()
        tasks = []
        for line in self.execute_command(args):
            if line:
                # Each line holds one JSON object, possibly with a trailing comma
                data = line.strip(',')
                try:
                    tasks.append(Task(self, json.loads(data)))
                except ValueError:
                    raise TaskWarriorException('Invalid JSON: %s' % data)
        return tasks

    def merge_with(self, path, push=False):
        path = path.rstrip('/') + '/'
        self.execute_command(['merge', path], config_override={
            'merge.autopush': 'yes' if push else 'no',
        })

    def undo(self):
        self.execute_command(['undo'], config_override={
            'confirmation': 'no',
        })
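

# Minimal usage sketch, not part of the original module. It assumes the
# TaskWarrior `task` binary is on PATH; the data directory below is a
# hypothetical example.
if __name__ == '__main__':
    tw = TaskWarrior(data_location='~/.task-demo')

    # Querysets are lazy: the `task ... export` call only runs on iteration
    for task in tw.tasks.pending():
        print(task['description'])

    # Field access goes through the deserialize_* hooks, e.g. 'due' becomes a
    # datetime and 'tags' a list. Writes are staged locally and only sent to
    # TaskWarrior when save() is called, which issues `task add ...` for a
    # task that has no uuid yet:
    #
    #     t = Task(tw)
    #     t['description'] = 'example task'   # hypothetical value
    #     t.save()
    #
    # Completing or deleting operates on an existing task's uuid:
    #
    #     tw.tasks.get(uuid='...').done()     # uuid elided here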