All patches and comments are welcome. Please squash your changes to logical
commits before using git-format-patch and git-send-email to
patches@git.madduck.net.
If you could read over the Git project's submission guidelines and adhere to
them, I would be especially grateful.
1 from __future__ import print_function
11 DATE_FORMAT = '%Y%m%dT%H%M%SZ'
14 COMPLETED = 'completed'
16 VERSION_2_1_0 = six.u('2.1.0')
17 VERSION_2_2_0 = six.u('2.2.0')
18 VERSION_2_3_0 = six.u('2.3.0')
19 VERSION_2_4_0 = six.u('2.4.0')
21 logger = logging.getLogger(__name__)
24 class TaskWarriorException(Exception):
28 class SerializingObject(object):
30 Common ancestor for TaskResource & TaskFilter, since they both
31 need to serialize arguments.
34 def _deserialize(self, key, value):
35 hydrate_func = getattr(self, 'deserialize_{0}'.format(key),
36 lambda x: x if x != '' else None)
37 return hydrate_func(value)
39 def _serialize(self, key, value):
40 dehydrate_func = getattr(self, 'serialize_{0}'.format(key),
41 lambda x: x if x is not None else '')
42 return dehydrate_func(value)
def timestamp_serializer(self, date):
    """Serialize a datetime into TaskWarrior's UTC timestamp format.

    Returns None for falsy input so that missing/empty timestamps do
    not crash with AttributeError on ``strftime``.
    """
    if not date:
        return None
    return date.strftime(DATE_FORMAT)
def timestamp_deserializer(self, date_str):
    """Parse a TaskWarrior UTC timestamp string into a datetime.

    Returns None for falsy input so that missing/empty timestamps do
    not crash ``strptime``.
    """
    if not date_str:
        return None
    return datetime.datetime.strptime(date_str, DATE_FORMAT)
# The entry/modified/due/scheduled/until/wait fields all hold
# TaskWarrior timestamps; each serialize_*/deserialize_* pair simply
# delegates to the shared timestamp helpers on this class.

def serialize_entry(self, value):
    return self.timestamp_serializer(value)

def deserialize_entry(self, value):
    return self.timestamp_deserializer(value)

def serialize_modified(self, value):
    return self.timestamp_serializer(value)

def deserialize_modified(self, value):
    return self.timestamp_deserializer(value)

def serialize_due(self, value):
    return self.timestamp_serializer(value)

def deserialize_due(self, value):
    return self.timestamp_deserializer(value)

def serialize_scheduled(self, value):
    return self.timestamp_serializer(value)

def deserialize_scheduled(self, value):
    return self.timestamp_deserializer(value)

def serialize_until(self, value):
    return self.timestamp_serializer(value)

def deserialize_until(self, value):
    return self.timestamp_deserializer(value)

def serialize_wait(self, value):
    return self.timestamp_serializer(value)

def deserialize_wait(self, value):
    return self.timestamp_deserializer(value)
def deserialize_annotations(self, data):
    """Convert raw annotation dicts into TaskAnnotation objects."""
    if not data:
        return []
    return [TaskAnnotation(self, item) for item in data]
def serialize_tags(self, tags):
    """Serialize a tag list into TaskWarrior's comma-separated form."""
    if not tags:
        return ''
    return ','.join(tags)
def deserialize_tags(self, tags):
    """Normalize tags into a list.

    TaskWarrior may hand tags over either as a comma-separated string
    or already as a list; the visible original fell through and
    implicitly returned None for non-string input, so normalize every
    shape (string, list, None) to a list.
    """
    if isinstance(tags, six.string_types):
        return tags.split(',') if tags else []
    return tags or []
def serialize_depends(self, cur_dependencies):
    """Serialize dependencies as a comma-separated list of uuids."""
    uuids = (task['uuid'] for task in cur_dependencies)
    return ','.join(uuids)
def deserialize_depends(self, raw_uuids):
    """Resolve a comma-separated uuid string into a set of Tasks.

    None is treated as no dependencies; empty fragments are skipped.
    """
    uuid_list = (raw_uuids or '').split(',')
    return set(
        self.warrior.tasks.get(uuid=candidate)
        for candidate in uuid_list
        if candidate
    )
111 class TaskResource(SerializingObject):
112 read_only_fields = []
def _load_data(self, data):
    """Replace internal state with deserialized copies of *data*."""
    deserialized = dict((field, self._deserialize(field, raw))
                        for field, raw in data.items())
    self._data = deserialized
    # Keep an independent snapshot so later edits to _data do not
    # leak into the original baseline.
    self._original_data = copy.deepcopy(deserialized)
121 def _update_data(self, data, update_original=False):
123 Low level update of the internal _data dict. Data which are coming as
124 updates should already be serialized. If update_original is True, the
125 original_data dict is updated as well.
127 self._data.update(dict((key, self._deserialize(key, value))
128 for key, value in data.items()))
131 self._original_data = copy.deepcopy(self._data)
134 def __getitem__(self, key):
135 # This is a workaround to make TaskResource non-iterable
136 # over simple index-based iteration
143 if key not in self._data:
144 self._data[key] = self._deserialize(key, None)
146 return self._data.get(key)
148 def __setitem__(self, key, value):
149 if key in self.read_only_fields:
150 raise RuntimeError('Field \'%s\' is read-only' % key)
151 self._data[key] = value
154 s = six.text_type(self.__unicode__())
156 s = s.encode('utf-8')
163 class TaskAnnotation(TaskResource):
164 read_only_fields = ['entry', 'description']
166 def __init__(self, task, data={}):
168 self._load_data(data)
171 self.task.remove_annotation(self)
def __unicode__(self):
    # An annotation's text representation is simply its description.
    return self['description']
def __eq__(self, other):
    """Two annotations are equal when they belong to the same task
    and carry identical data dicts."""
    same_task = self.task == other.task
    return same_task and self._data == other._data
181 __repr__ = __unicode__
184 class Task(TaskResource):
185 read_only_fields = ['id', 'entry', 'urgency', 'uuid', 'modified']
187 class DoesNotExist(Exception):
190 class CompletedTask(Exception):
192 Raised when the operation cannot be performed on the completed task.
196 class DeletedTask(Exception):
198 Raised when the operation cannot be performed on the deleted task.
202 class NotSaved(Exception):
204 Raised when the operation cannot be performed on the task, because
205 it has not been saved to TaskWarrior yet.
210 def from_input(cls, input_file=sys.stdin, modify=None):
212 Creates a Task object, directly from the stdin, by reading one line.
213 If modify=True, two lines are used, first line interpreted as the
214 original state of the Task object, and second line as its new,
215 modified value. This is consistent with the TaskWarrior's hook
218 Object created by this method should not be saved, deleted
219 or refreshed, as t could create a infinite loop. For this
220 reason, TaskWarrior instance is set to None.
222 Input_file argument can be used to specify the input file,
223 but defaults to sys.stdin.
226 # TaskWarrior instance is set to None
229 # Detect the hook type if not given directly
230 name = os.path.basename(sys.argv[0])
231 modify = name.startswith('on-modify') if modify is None else modify
233 # Load the data from the input
234 task._load_data(json.loads(input_file.readline().strip()))
236 # If this is a on-modify event, we are provided with additional
237 # line of input, which provides updated data
239 task._update_data(json.loads(input_file.readline().strip()))
def __init__(self, warrior, **kwargs):
    """Create a Task bound to *warrior*, seeding fields from kwargs."""
    self.warrior = warrior

    # Refuse read-only fields up front so they cannot be sneaked in
    # through the constructor.
    for field in kwargs:
        if field in self.read_only_fields:
            raise RuntimeError('Field \'%s\' is read-only' % field)

    # Serialize the kwargs so library users do not have to pass
    # different data formats via __setitem__ and __init__ — that
    # would be confusing.
    # dict((...) for ...) rather than a dict comprehension: kept for
    # python2.6 compatibility, matching the rest of the file.
    self._load_data(dict((field, self._serialize(field, value))
                         for (field, value) in six.iteritems(kwargs)))
def __unicode__(self):
    # A task's text representation is its description field.
    return self['description']
def __eq__(self, other):
    """Saved tasks compare by uuid; unsaved ones by object identity."""
    if self['uuid'] and other['uuid']:
        # Both tasks are saved: uuid equality is authoritative.
        return self['uuid'] == other['uuid']
    # At least one task is unsaved: fall back to identity.
    return self is other
273 # For saved Tasks, just define equality by equality of uuids
274 return self['uuid'].__hash__()
276 # If the tasks are not saved, return hash of instance id
277 return id(self).__hash__()
280 def _modified_fields(self):
281 writable_fields = set(self._data.keys()) - set(self.read_only_fields)
282 for key in writable_fields:
283 if self._data.get(key) != self._original_data.get(key):
288 return bool(list(self._modified_fields))
292 return self['status'] == six.text_type('completed')
296 return self['status'] == six.text_type('deleted')
300 return self['status'] == six.text_type('waiting')
304 return self['status'] == six.text_type('pending')
308 return self['uuid'] is not None or self['id'] is not None
310 def serialize_depends(self, cur_dependencies):
311 # Check that all the tasks are saved
312 for task in cur_dependencies:
314 raise Task.NotSaved('Task \'%s\' needs to be saved before '
315 'it can be set as dependency.' % task)
317 return super(Task, self).serialize_depends(cur_dependencies)
def format_depends(self):
    """Build the 'depends:' modification argument for TaskWarrior.

    TaskWarrior does not accept redefining the whole dependency set,
    so only the delta is emitted: newly added uuids as-is, removed
    ones prefixed with '-'. This cannot live in serialize_depends,
    because _data must keep the full dependency set, not just the
    currently added/removed ones.
    """
    previous = self._original_data.get('depends', set())
    current = self['depends']

    added = current - previous
    removed = previous - current

    parts = [t['uuid'] for t in added]
    parts += ['-' + t['uuid'] for t in removed]
    return 'depends:' + ','.join(parts)
def format_description(self):
    """Build the description argument for task modification.

    TaskWarrior older than 2.4.0 ignores the first word of the task
    description when the description: prefix is used, so emit the
    bare description there.
    """
    description = self._data['description']
    if self.warrior.version < VERSION_2_4_0:
        return description
    return "description:'{0}'".format(description or '')
348 raise Task.NotSaved("Task needs to be saved before it can be deleted")
350 # Refresh the status, and raise exception if the task is deleted
351 self.refresh(only_fields=['status'])
354 raise Task.DeletedTask("Task was already deleted")
356 self.warrior.execute_command([self['uuid'], 'delete'])
358 # Refresh the status again, so that we have updated info stored
359 self.refresh(only_fields=['status'])
364 raise Task.NotSaved("Task needs to be saved before it can be completed")
366 # Refresh, and raise exception if task is already completed/deleted
367 self.refresh(only_fields=['status'])
370 raise Task.CompletedTask("Cannot complete a completed task")
372 raise Task.DeletedTask("Deleted task cannot be completed")
374 self.warrior.execute_command([self['uuid'], 'done'])
376 # Refresh the status again, so that we have updated info stored
377 self.refresh(only_fields=['status'])
380 if self.saved and not self.modified:
383 args = [self['uuid'], 'modify'] if self.saved else ['add']
384 args.extend(self._get_modified_fields_as_args())
385 output = self.warrior.execute_command(args)
387 # Parse out the new ID, if the task is being added for the first time
389 id_lines = [l for l in output if l.startswith('Created task ')]
391 # Complain loudly if it seems that more tasks were created
393 if len(id_lines) != 1 or len(id_lines[0].split(' ')) != 3:
394 raise TaskWarriorException("Unexpected output when creating "
395 "task: %s" % '\n'.join(id_lines))
397 # Circumvent the ID storage, since ID is considered read-only
398 self._data['id'] = int(id_lines[0].split(' ')[2].rstrip('.'))
400 # Refreshing is very important here, as not only modification time
401 # is updated, but arbitrary attribute may have changed due hooks
402 # altering the data before saving
405 def add_annotation(self, annotation):
407 raise Task.NotSaved("Task needs to be saved to add annotation")
409 args = [self['uuid'], 'annotate', annotation]
410 self.warrior.execute_command(args)
411 self.refresh(only_fields=['annotations'])
413 def remove_annotation(self, annotation):
415 raise Task.NotSaved("Task needs to be saved to remove annotation")
417 if isinstance(annotation, TaskAnnotation):
418 annotation = annotation['description']
419 args = [self['uuid'], 'denotate', annotation]
420 self.warrior.execute_command(args)
421 self.refresh(only_fields=['annotations'])
423 def _get_modified_fields_as_args(self):
426 def add_field(field):
427 # Add the output of format_field method to args list (defaults to
429 serialized_value = self._serialize(field, self._data[field]) or ''
430 format_default = lambda: "{0}:{1}".format(
432 "'{0}'".format(serialized_value) if serialized_value else ''
434 format_func = getattr(self, 'format_{0}'.format(field),
436 args.append(format_func())
438 # If we're modifying saved task, simply pass on all modified fields
440 for field in self._modified_fields:
442 # For new tasks, pass all fields that make sense
444 for field in self._data.keys():
445 if field in self.read_only_fields:
451 def refresh(self, only_fields=[]):
452 # Raise error when trying to refresh a task that has not been saved
454 raise Task.NotSaved("Task needs to be saved to be refreshed")
456 # We need to use ID as backup for uuid here for the refreshes
457 # of newly saved tasks. Any other place in the code is fine
458 # with using UUID only.
459 args = [self['uuid'] or self['id'], 'export']
460 new_data = json.loads(self.warrior.execute_command(args)[0])
463 [(k, new_data.get(k)) for k in only_fields])
464 self._update_data(to_update, update_original=True)
466 self._load_data(new_data)
def export_data(self):
    """
    Exports current data contained in the Task as JSON
    """
    # We need to remove spaces for TW-1504, use custom separators
    data_tuples = ((key, self._serialize(key, value))
                   for key, value in six.iteritems(self._data))

    # Empty string denotes empty serialized value, we do not want
    # to pass that to TaskWarrior.
    # Fixed: the original used `is not ''` — identity comparison with
    # a string literal is unreliable and a SyntaxWarning on modern
    # Python; value comparison (!=) is the correct check.
    data_tuples = filter(lambda t: t[1] != '', data_tuples)
    data = dict(data_tuples)
    return json.dumps(data, separators=(',', ':'))
483 class TaskFilter(SerializingObject):
485 A set of parameters to filter the task list with.
488 def __init__(self, filter_params=[]):
489 self.filter_params = filter_params
def add_filter(self, filter_str):
    # Append a raw filter string (e.g. 'status:pending') verbatim.
    self.filter_params.append(filter_str)
494 def add_filter_param(self, key, value):
495 key = key.replace('__', '.')
497 # Replace the value with empty string, since that is the
498 # convention in TW for empty values
499 attribute_key = key.split('.')[0]
500 value = self._serialize(attribute_key, value)
502 # If we are filtering by uuid:, do not use uuid keyword
505 self.filter_params.insert(0, value)
507 # Surround value with aphostrophes unless it's a empty string
508 value = "'%s'" % value if value else ''
510 # We enforce equality match by using 'is' (or 'none') modifier
511 # Without using this syntax, filter fails due to TW-1479
512 modifier = '.is' if value else '.none'
513 key = key + modifier if '.' not in key else key
515 self.filter_params.append("{0}:{1}".format(key, value))
517 def get_filter_params(self):
518 return [f for f in self.filter_params if f]
522 c.filter_params = list(self.filter_params)
526 class TaskQuerySet(object):
528 Represents a lazy lookup for a task objects.
def __init__(self, warrior=None, filter_obj=None):
    self.warrior = warrior
    # Cache of executed results; None means "not evaluated yet".
    self._result_cache = None
    self.filter_obj = filter_obj or TaskFilter()
536 def __deepcopy__(self, memo):
538 Deep copy of a QuerySet doesn't populate the cache
540 obj = self.__class__()
541 for k, v in self.__dict__.items():
542 if k in ('_iter', '_result_cache'):
543 obj.__dict__[k] = None
545 obj.__dict__[k] = copy.deepcopy(v, memo)
549 data = list(self[:REPR_OUTPUT_SIZE + 1])
550 if len(data) > REPR_OUTPUT_SIZE:
551 data[-1] = "...(remaining elements truncated)..."
555 if self._result_cache is None:
556 self._result_cache = list(self)
557 return len(self._result_cache)
560 if self._result_cache is None:
561 self._result_cache = self._execute()
562 return iter(self._result_cache)
564 def __getitem__(self, k):
565 if self._result_cache is None:
566 self._result_cache = list(self)
567 return self._result_cache.__getitem__(k)
570 if self._result_cache is not None:
571 return bool(self._result_cache)
574 except StopIteration:
def __nonzero__(self):
    # Python 2 truthiness hook; delegate to the Python 3 __bool__.
    return type(self).__bool__(self)
581 def _clone(self, klass=None, **kwargs):
583 klass = self.__class__
584 filter_obj = self.filter_obj.clone()
585 c = klass(warrior=self.warrior, filter_obj=filter_obj)
586 c.__dict__.update(kwargs)
591 Fetch the tasks which match the current filters.
593 return self.warrior.filter_tasks(self.filter_obj)
597 Returns a new TaskQuerySet that is a copy of the current one.
602 return self.filter(status=PENDING)
605 return self.filter(status=COMPLETED)
607 def filter(self, *args, **kwargs):
609 Returns a new TaskQuerySet with the given filters added.
611 clone = self._clone()
613 clone.filter_obj.add_filter(f)
614 for key, value in kwargs.items():
615 clone.filter_obj.add_filter_param(key, value)
618 def get(self, **kwargs):
620 Performs the query and returns a single object matching the given
623 clone = self.filter(**kwargs)
626 return clone._result_cache[0]
628 raise Task.DoesNotExist(
629 'Task matching query does not exist. '
630 'Lookup parameters were {0}'.format(kwargs))
632 'get() returned more than one Task -- it returned {0}! '
633 'Lookup parameters were {1}'.format(num, kwargs))
636 class TaskWarrior(object):
def __init__(self, data_location='~/.task', create=True):
    """Wrap a TaskWarrior data store.

    :param data_location: path to the task data directory; ``~`` is
        expanded. Created if missing when *create* is True.
    :param create: whether to create a missing data directory.
    """
    data_location = os.path.expanduser(data_location)
    if create and not os.path.exists(data_location):
        os.makedirs(data_location)
    self.config = {
        # Already expanded above — the original expanded it twice.
        'data.location': data_location,
        'confirmation': 'no',
        'dependency.confirmation': 'no',  # See TW-1483 or taskrc man page
    }
    self.tasks = TaskQuerySet(self)
    self.version = self._get_version()
649 def _get_command_args(self, args, config_override={}):
650 command_args = ['task', 'rc:/']
651 config = self.config.copy()
652 config.update(config_override)
653 for item in config.items():
654 command_args.append('rc.{0}={1}'.format(*item))
655 command_args.extend(map(str, args))
def _get_version(self):
    """Ask the installed 'task' binary for its version string."""
    process = subprocess.Popen(
        ['task', '--version'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE)
    stdout, _stderr = process.communicate()
    return stdout.decode('utf-8').strip('\n')
666 def execute_command(self, args, config_override={}):
667 command_args = self._get_command_args(
668 args, config_override=config_override)
669 logger.debug(' '.join(command_args))
670 p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
671 stderr=subprocess.PIPE)
672 stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
675 error_msg = stderr.strip().splitlines()[-1]
677 error_msg = stdout.strip()
678 raise TaskWarriorException(error_msg)
679 return stdout.strip().split('\n')
681 def filter_tasks(self, filter_obj):
682 args = ['export', '--'] + filter_obj.get_filter_params()
684 for line in self.execute_command(args):
686 data = line.strip(',')
688 filtered_task = Task(self)
689 filtered_task._load_data(json.loads(data))
690 tasks.append(filtered_task)
692 raise TaskWarriorException('Invalid JSON: %s' % data)
def merge_with(self, path, push=False):
    """Merge tasks from another data location, optionally autopushing."""
    # TaskWarrior's merge expects a trailing slash on the remote path.
    remote = path.rstrip('/') + '/'
    autopush = 'yes' if push else 'no'
    self.execute_command(['merge', remote], config_override={
        'merge.autopush': autopush,
    })
702 self.execute_command(['undo'])