git.madduck.net Git - etc/taskwarrior.git/blob - tasklib/task.py

Task: Check that we are unable to set read only values through __init__
from __future__ import print_function
import copy
import datetime
import json
import logging
import os
import six
import subprocess

DATE_FORMAT = '%Y%m%dT%H%M%SZ'
REPR_OUTPUT_SIZE = 10
PENDING = 'pending'
COMPLETED = 'completed'

VERSION_2_1_0 = six.u('2.1.0')
VERSION_2_2_0 = six.u('2.2.0')
VERSION_2_3_0 = six.u('2.3.0')
VERSION_2_4_0 = six.u('2.4.0')

logger = logging.getLogger(__name__)


class TaskWarriorException(Exception):
    pass


class TaskResource(object):
    read_only_fields = []

    def _load_data(self, data):
        self._data = data
        # We need to use a copy for original data, so that changes
        # are not propagated. Shallow copy is alright, since data dict uses only
        # primitive data types
        self._original_data = data.copy()

    def _update_data(self, data, update_original=False):
        """
        Low level update of the internal _data dict. Data which are coming as
        updates should already be serialized. If update_original is True, the
        original_data dict is updated as well.
        """

        self._data.update(data)

        if update_original:
            self._original_data.update(data)

    def __getitem__(self, key):
        # This is a workaround to make TaskResource non-iterable
        # over simple index-based iteration
        try:
            int(key)
            raise StopIteration
        except ValueError:
            pass

        return self._deserialize(key, self._data.get(key))

    def __setitem__(self, key, value):
        if key in self.read_only_fields:
            raise RuntimeError('Field \'%s\' is read-only' % key)
        self._data[key] = self._serialize(key, value)

    def _deserialize(self, key, value):
        hydrate_func = getattr(self, 'deserialize_{0}'.format(key),
                               lambda x: x)
        return hydrate_func(value)

    def _serialize(self, key, value):
        dehydrate_func = getattr(self, 'serialize_{0}'.format(key),
                                 lambda x: x)
        return dehydrate_func(value)

    def __str__(self):
        s = six.text_type(self.__unicode__())
        if not six.PY3:
            s = s.encode('utf-8')
        return s

    def __repr__(self):
        return str(self)

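# Field (de)serialization in TaskResource above is hook-based: _serialize and
# _deserialize look for a serialize_<field> / deserialize_<field> method on
# the instance and fall back to the identity function. A purely illustrative
# sketch of the effect on a Task:
#
#     task['due'] = datetime.datetime(2015, 2, 26, 10, 0)
#     task._data['due']      # '20150226T100000Z', via Task.serialize_due
#     task['project'] = 'Home'
#     task._data['project']  # 'Home' (no hook defined, stored as-is)
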

class TaskAnnotation(TaskResource):
    read_only_fields = ['entry', 'description']

    def __init__(self, task, data={}):
        self.task = task
        self._load_data(data)

    def deserialize_entry(self, data):
        return datetime.datetime.strptime(data, DATE_FORMAT) if data else None

    def serialize_entry(self, date):
        return date.strftime(DATE_FORMAT) if date else ''

    def remove(self):
        self.task.remove_annotation(self)

    def __unicode__(self):
        return self['description']

    __repr__ = __unicode__


class Task(TaskResource):
    read_only_fields = ['id', 'entry', 'urgency', 'uuid', 'modified']

    class DoesNotExist(Exception):
        pass

    class CompletedTask(Exception):
        """
        Raised when the operation cannot be performed on the completed task.
        """
        pass

    class DeletedTask(Exception):
        """
        Raised when the operation cannot be performed on the deleted task.
        """
        pass

    class NotSaved(Exception):
        """
        Raised when the operation cannot be performed on the task, because
        it has not been saved to TaskWarrior yet.
        """
        pass

    def __init__(self, warrior, **kwargs):
        self.warrior = warrior

        # Check that the user is not able to set a read-only value in __init__
        for key in kwargs.keys():
            if key in self.read_only_fields:
                raise RuntimeError('Field \'%s\' is read-only' % key)

        # We serialize the data in kwargs so that users of the library
        # do not have to pass different data formats via __setitem__ and
        # __init__ methods, which would be confusing

        # Rather unfortunate syntax due to python2.6 compatibility
        self._load_data(dict((key, self._serialize(key, value))
                        for (key, value) in six.iteritems(kwargs)))

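    # Because kwargs are serialized on construction, the same Python values
    # work for both __init__ and __setitem__. A hypothetical sketch, assuming
    # an existing TaskWarrior instance named tw:
    #
    #     new_task = Task(tw, description='Buy milk',
    #                     due=datetime.datetime(2015, 3, 1))
    #     new_task._data  # {'description': 'Buy milk', 'due': '20150301T000000Z'}
    #     Task(tw, uuid='...')  # raises RuntimeError, uuid is read-only
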
    def __unicode__(self):
        return self['description']

    def __eq__(self, other):
        if self['uuid'] and other['uuid']:
            # For saved Tasks, just define equality by equality of uuids
            return self['uuid'] == other['uuid']
        else:
            # If the tasks are not saved, compare the actual instances
            return id(self) == id(other)

    def __hash__(self):
        if self['uuid']:
            # For saved Tasks, hash by the uuid
            return self['uuid'].__hash__()
        else:
            # If the tasks are not saved, return hash of instance id
            return id(self).__hash__()

    @property
    def _modified_fields(self):
        writable_fields = set(self._data.keys()) - set(self.read_only_fields)
        for key in writable_fields:
            if self._data.get(key) != self._original_data.get(key):
                yield key

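    # Illustrative only: after loading a task and changing a writable field,
    # _modified_fields yields just the keys whose serialized value differs
    # from the originally loaded data, e.g.
    #
    #     task['priority'] = 'H'
    #     list(task._modified_fields)  # ['priority'] (read-only fields excluded)
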
    @property
    def completed(self):
        return self['status'] == six.text_type('completed')

    @property
    def deleted(self):
        return self['status'] == six.text_type('deleted')

    @property
    def waiting(self):
        return self['status'] == six.text_type('waiting')

    @property
    def pending(self):
        return self['status'] == six.text_type('pending')

    @property
    def saved(self):
        return self['uuid'] is not None or self['id'] is not None

    def serialize_due(self, date):
        if not date:
            return None
        return date.strftime(DATE_FORMAT)

    def deserialize_due(self, date_str):
        if not date_str:
            return None
        return datetime.datetime.strptime(date_str, DATE_FORMAT)

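    # Dates are exchanged with Taskwarrior in the compact DATE_FORMAT; no
    # timezone conversion is performed here, so naive datetimes round-trip
    # unchanged. For example:
    #
    #     serialize_due(datetime.datetime(2015, 2, 26, 10, 0))  # '20150226T100000Z'
    #     deserialize_due('20150226T100000Z')  # datetime.datetime(2015, 2, 26, 10, 0)
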
    def serialize_depends(self, cur_dependencies):
        # Check that all the tasks are saved
        for task in cur_dependencies:
            if not task.saved:
                raise Task.NotSaved('Task \'%s\' needs to be saved before '
                                    'it can be set as dependency.' % task)

        # Return the list of uuids
        return ','.join(task['uuid'] for task in cur_dependencies)

    def deserialize_depends(self, raw_uuids):
        raw_uuids = raw_uuids or ''  # Convert None to empty string
        uuids = raw_uuids.split(',')
        return set(self.warrior.tasks.get(uuid=uuid) for uuid in uuids if uuid)

    def format_depends(self):
        # We need to generate added and removed dependencies list,
        # since Taskwarrior does not accept redefining dependencies.

        # This cannot be part of serialize_depends, since we need
        # to keep a list of all dependencies in the _data dictionary,
        # not just currently added/removed ones

        old_dependencies_raw = self._original_data.get('depends', '')
        old_dependencies = self.deserialize_depends(old_dependencies_raw)

        added = self['depends'] - old_dependencies
        removed = old_dependencies - self['depends']

        # Removed dependencies need to be prefixed with '-'
        return 'depends:' + ','.join(
                [t['uuid'] for t in added] +
                ['-' + t['uuid'] for t in removed]
            )

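    # Illustrative sketch with hypothetical uuids: if the task was saved with
    # a dependency on task A and self['depends'] now contains only task B,
    # format_depends produces the incremental modification argument
    #
    #     "depends:<uuid-of-B>,-<uuid-of-A>"
    #
    # (element order follows set iteration order and is not guaranteed).
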
    def deserialize_annotations(self, data):
        return [TaskAnnotation(self, d) for d in data] if data else []

    def deserialize_tags(self, tags):
        if isinstance(tags, six.string_types):
            return tags.split(',') if tags else []
        return tags

    def serialize_tags(self, tags):
        return ','.join(tags) if tags else ''

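    # For illustration: tags round-trip between a Python list and the
    # comma-separated string Taskwarrior uses, e.g.
    #
    #     serialize_tags(['home', 'next'])    # 'home,next'
    #     deserialize_tags('home,next')       # ['home', 'next']
    #     deserialize_tags(['home', 'next'])  # passed through unchanged
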
    def format_description(self):
        # Task version older than 2.4.0 ignores first word of the
        # task description if description: prefix is used
        if self.warrior.version < VERSION_2_4_0:
            return self._data['description']
        else:
            return "description:'{0}'".format(self._data['description'] or '')

    def delete(self):
        if not self.saved:
            raise Task.NotSaved("Task needs to be saved before it can be deleted")

        # Refresh the status, and raise exception if the task is deleted
        self.refresh(only_fields=['status'])

        if self.deleted:
            raise Task.DeletedTask("Task was already deleted")

        self.warrior.execute_command([self['uuid'], 'delete'])

        # Refresh the status again, so that we have updated info stored
        self.refresh(only_fields=['status'])

    def done(self):
        if not self.saved:
            raise Task.NotSaved("Task needs to be saved before it can be completed")

        # Refresh, and raise exception if task is already completed/deleted
        self.refresh(only_fields=['status'])

        if self.completed:
            raise Task.CompletedTask("Cannot complete a completed task")
        elif self.deleted:
            raise Task.DeletedTask("Deleted task cannot be completed")

        self.warrior.execute_command([self['uuid'], 'done'])

        # Refresh the status again, so that we have updated info stored
        self.refresh(only_fields=['status'])

    def save(self):
        args = [self['uuid'], 'modify'] if self.saved else ['add']
        args.extend(self._get_modified_fields_as_args())
        output = self.warrior.execute_command(args)

        # Parse out the new ID, if the task is being added for the first time
        if not self.saved:
            id_lines = [l for l in output if l.startswith('Created task ')]

            # Complain loudly if it seems that more tasks were created
            # Should not happen
            if len(id_lines) != 1 or len(id_lines[0].split(' ')) != 3:
                raise TaskWarriorException("Unexpected output when creating "
                                           "task: %s" % '\n'.join(id_lines))

            # Circumvent the ID storage, since ID is considered read-only
            self._data['id'] = int(id_lines[0].split(' ')[2].rstrip('.'))

        self.refresh()

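    # Rough sketch of the two save paths (values are illustrative):
    #
    #     unsaved task -> execute_command(['add', "description:'Buy milk'",
    #                                      "tags:'errand'"])
    #     saved task   -> execute_command([<uuid>, 'modify',
    #                                      <only the modified fields>])
    #
    # For a new task the "Created task N." output line is parsed to recover
    # the numeric id before the final refresh().
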
    def add_annotation(self, annotation):
        if not self.saved:
            raise Task.NotSaved("Task needs to be saved to add annotation")

        args = [self['uuid'], 'annotate', annotation]
        self.warrior.execute_command(args)
        self.refresh(only_fields=['annotations'])

    def remove_annotation(self, annotation):
        if not self.saved:
            raise Task.NotSaved("Task needs to be saved to remove annotation")

        if isinstance(annotation, TaskAnnotation):
            annotation = annotation['description']
        args = [self['uuid'], 'denotate', annotation]
        self.warrior.execute_command(args)
        self.refresh(only_fields=['annotations'])

    def _get_modified_fields_as_args(self):
        args = []

        def add_field(field):
            # Add the output of format_field method to args list (defaults to
            # field:value)
            format_default = lambda k: "{0}:'{1}'".format(k, self._data[k] or '')
            format_func = getattr(self, 'format_{0}'.format(field),
                                  lambda: format_default(field))
            args.append(format_func())

        # If we're modifying saved task, simply pass on all modified fields
        if self.saved:
            for field in self._modified_fields:
                add_field(field)
        # For new tasks, pass all fields that make sense
        else:
            for field in self._data.keys():
                if field in self.read_only_fields:
                    continue
                add_field(field)

        return args

    def refresh(self, only_fields=[]):
        # Raise error when trying to refresh a task that has not been saved
        if not self.saved:
            raise Task.NotSaved("Task needs to be saved to be refreshed")

        # We need to use ID as backup for uuid here for the refreshes
        # of newly saved tasks. Any other place in the code is fine
        # with using UUID only.
        args = [self['uuid'] or self['id'], 'export']
        new_data = json.loads(self.warrior.execute_command(args)[0])
        if only_fields:
            to_update = dict(
                [(k, new_data.get(k)) for k in only_fields])
            self._update_data(to_update, update_original=True)
        else:
            self._load_data(new_data)


class TaskFilter(object):
    """
    A set of parameters to filter the task list with.
    """

    def __init__(self, filter_params=[]):
        self.filter_params = filter_params

    def add_filter(self, filter_str):
        self.filter_params.append(filter_str)

    def add_filter_param(self, key, value):
        key = key.replace('__', '.')

        # Replace the value with empty string, since that is the
        # convention in TW for empty values
        value = value if value is not None else ''

        # If we are filtering by uuid:, do not use uuid keyword
        # due to TW-1452 bug
        if key == 'uuid':
            self.filter_params.insert(0, value)
        else:
            self.filter_params.append('{0}:{1}'.format(key, value))

    def get_filter_params(self):
        return [f for f in self.filter_params if f]

    def clone(self):
        c = self.__class__()
        c.filter_params = list(self.filter_params)
        return c


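# Sketch of how keyword filters map to Taskwarrior filter arguments
# (the values below are illustrative):
#
#     f = TaskFilter()
#     f.add_filter_param('status', 'pending')        # -> 'status:pending'
#     f.add_filter_param('due__before', 'tomorrow')  # -> 'due.before:tomorrow'
#     f.add_filter_param('uuid', 'a1b2...')          # bare value, prepended (TW-1452)
#     f.get_filter_params()  # ['a1b2...', 'status:pending', 'due.before:tomorrow']
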
class TaskQuerySet(object):
    """
    Represents a lazy lookup for task objects.
    """

    def __init__(self, warrior=None, filter_obj=None):
        self.warrior = warrior
        self._result_cache = None
        self.filter_obj = filter_obj or TaskFilter()

    def __deepcopy__(self, memo):
        """
        Deep copy of a QuerySet doesn't populate the cache
        """
        obj = self.__class__()
        for k, v in self.__dict__.items():
            if k in ('_iter', '_result_cache'):
                obj.__dict__[k] = None
            else:
                obj.__dict__[k] = copy.deepcopy(v, memo)
        return obj

    def __repr__(self):
        data = list(self[:REPR_OUTPUT_SIZE + 1])
        if len(data) > REPR_OUTPUT_SIZE:
            data[-1] = "...(remaining elements truncated)..."
        return repr(data)

    def __len__(self):
        if self._result_cache is None:
            self._result_cache = list(self)
        return len(self._result_cache)

    def __iter__(self):
        if self._result_cache is None:
            self._result_cache = self._execute()
        return iter(self._result_cache)

    def __getitem__(self, k):
        if self._result_cache is None:
            self._result_cache = list(self)
        return self._result_cache.__getitem__(k)

    def __bool__(self):
        if self._result_cache is not None:
            return bool(self._result_cache)
        try:
            next(iter(self))
        except StopIteration:
            return False
        return True

    def __nonzero__(self):
        return type(self).__bool__(self)

    def _clone(self, klass=None, **kwargs):
        if klass is None:
            klass = self.__class__
        filter_obj = self.filter_obj.clone()
        c = klass(warrior=self.warrior, filter_obj=filter_obj)
        c.__dict__.update(kwargs)
        return c

    def _execute(self):
        """
        Fetch the tasks which match the current filters.
        """
        return self.warrior.filter_tasks(self.filter_obj)

    def all(self):
        """
        Returns a new TaskQuerySet that is a copy of the current one.
        """
        return self._clone()

    def pending(self):
        return self.filter(status=PENDING)

    def completed(self):
        return self.filter(status=COMPLETED)

    def filter(self, *args, **kwargs):
        """
        Returns a new TaskQuerySet with the given filters added.
        """
        clone = self._clone()
        for f in args:
            clone.filter_obj.add_filter(f)
        for key, value in kwargs.items():
            clone.filter_obj.add_filter_param(key, value)
        return clone

    def get(self, **kwargs):
        """
        Performs the query and returns a single object matching the given
        keyword arguments.
        """
        clone = self.filter(**kwargs)
        num = len(clone)
        if num == 1:
            return clone._result_cache[0]
        if not num:
            raise Task.DoesNotExist(
                'Task matching query does not exist. '
                'Lookup parameters were {0}'.format(kwargs))
        raise ValueError(
            'get() returned more than one Task -- it returned {0}! '
            'Lookup parameters were {1}'.format(num, kwargs))


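# Illustrative querying sketch, assuming an existing TaskWarrior instance tw:
#
#     tw.tasks.pending()                     # lazy, nothing executed yet
#     list(tw.tasks.filter(project='Home'))  # runs `task ... export` now
#     tw.tasks.get(uuid='...')               # exactly one Task, or raises
#                                            # Task.DoesNotExist / ValueError
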
class TaskWarrior(object):
    def __init__(self, data_location='~/.task', create=True):
        data_location = os.path.expanduser(data_location)
        if create and not os.path.exists(data_location):
            os.makedirs(data_location)
        self.config = {
            'data.location': os.path.expanduser(data_location),
            'confirmation': 'no',
        }
        self.tasks = TaskQuerySet(self)
        self.version = self._get_version()

    def _get_command_args(self, args, config_override={}):
        command_args = ['task', 'rc:/']
        config = self.config.copy()
        config.update(config_override)
        for item in config.items():
            command_args.append('rc.{0}={1}'.format(*item))
        command_args.extend(map(str, args))
        return command_args

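    # For illustration, with the default config and a data_location of
    # '/home/user/.task', _get_command_args(['export']) produces roughly
    #
    #     ['task', 'rc:/', 'rc.data.location=/home/user/.task',
    #      'rc.confirmation=no', 'export']
    #
    # (the relative order of the rc.* overrides follows dict iteration order).
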
    def _get_version(self):
        p = subprocess.Popen(
                ['task', '--version'],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
        return stdout.strip('\n')

    def execute_command(self, args, config_override={}):
        command_args = self._get_command_args(
            args, config_override=config_override)
        logger.debug(' '.join(command_args))
        p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
        if p.returncode:
            if stderr.strip():
                error_msg = stderr.strip().splitlines()[-1]
            else:
                error_msg = stdout.strip()
            raise TaskWarriorException(error_msg)
        return stdout.strip().split('\n')

    def filter_tasks(self, filter_obj):
        args = ['export', '--'] + filter_obj.get_filter_params()
        tasks = []
        for line in self.execute_command(args):
            if line:
                data = line.strip(',')
                try:
                    filtered_task = Task(self)
                    filtered_task._load_data(json.loads(data))
                    tasks.append(filtered_task)
                except ValueError:
                    raise TaskWarriorException('Invalid JSON: %s' % data)
        return tasks

    def merge_with(self, path, push=False):
        path = path.rstrip('/') + '/'
        self.execute_command(['merge', path], config_override={
            'merge.autopush': 'yes' if push else 'no',
        })

    def undo(self):
        self.execute_command(['undo'])
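

# A minimal end-to-end sketch, not part of the library itself. It assumes the
# `task` binary is installed and on PATH, and uses a throwaway data directory
# so an existing ~/.task database is not touched. Output details vary with
# the installed Taskwarrior version.
if __name__ == '__main__':
    import tempfile

    tw = TaskWarrior(data_location=tempfile.mkdtemp(prefix='tasklib-demo-'))
    demo = Task(tw, description='Try out tasklib', tags=['demo'])
    demo.save()
    print('Created task %s (uuid %s)' % (demo['id'], demo['uuid']))

    demo.add_annotation('annotations are returned as TaskAnnotation objects')
    demo.done()
    print('%d completed task(s) in the demo database' % len(tw.tasks.completed()))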