X-Git-Url: https://git.madduck.net/etc/taskwarrior.git/blobdiff_plain/38ec8d045e14a40d6de7b6e5b07be0f856591ee2..60ad4e9ca34e12a29de76170a8b6ca746742a174:/tasklib/task.py

diff --git a/tasklib/task.py b/tasklib/task.py
index 50384bf..c7c9e6d 100644
--- a/tasklib/task.py
+++ b/tasklib/task.py
@@ -1,31 +1,23 @@
 from __future__ import print_function
 import copy
-import datetime
+import importlib
 import json
 import logging
 import os
-import pytz
 import six
 import sys
-import subprocess
-import tzlocal
+
+from .serializing import SerializingObject
 
 DATE_FORMAT = '%Y%m%dT%H%M%SZ'
 REPR_OUTPUT_SIZE = 10
 PENDING = 'pending'
 COMPLETED = 'completed'
-
-VERSION_2_1_0 = six.u('2.1.0')
-VERSION_2_2_0 = six.u('2.2.0')
-VERSION_2_3_0 = six.u('2.3.0')
-VERSION_2_4_0 = six.u('2.4.0')
+DELETED = 'deleted'
+WAITING = 'waiting'
+RECURRING = 'recurring'
 
 logger = logging.getLogger(__name__)
-local_zone = tzlocal.get_localzone()
-
-
-class TaskWarriorException(Exception):
-    pass
 
 
 class ReadOnlyDictView(object):
@@ -49,6 +41,11 @@ class ReadOnlyDictView(object):
     def __len__(self):
         return len(self.viewed_dict)
 
+    def __unicode__(self):
+        return six.u('ReadOnlyDictView: {0}'.format(repr(self.viewed_dict)))
+
+    __repr__ = __unicode__
+
     def get(self, key, default=None):
         return copy.deepcopy(self.viewed_dict.get(key, default))
 
@@ -59,196 +56,6 @@ class ReadOnlyDictView(object):
         return [copy.deepcopy(v) for v in self.viewed_dict.values()]
 
 
-class SerializingObject(object):
-    """
-    Common ancestor for TaskResource & TaskFilter, since they both
-    need to serialize arguments.
-
-    Serializing method should hold the following contract:
-      - any empty value (meaning removal of the attribute)
-        is deserialized into a empty string
-      - None denotes a empty value for any attribute
-
-    Deserializing method should hold the following contract:
-      - None denotes a empty value for any attribute (however,
-        this is here as a safeguard, TaskWarrior currently does
-        not export empty-valued attributes) if the attribute
-        is not iterable (e.g. list or set), in which case
-        a empty iterable should be used.
-
-    Normalizing methods should hold the following contract:
-      - They are used to validate and normalize the user input.
-        Any attribute value that comes from the user (during Task
-        initialization, assignign values to Task attributes, or
-        filtering by user-provided values of attributes) is first
-        validated and normalized using the normalize_{key} method.
-      - If validation or normalization fails, normalizer is expected
-        to raise ValueError.
-    """
-
-    def _deserialize(self, key, value):
-        hydrate_func = getattr(self, 'deserialize_{0}'.format(key),
-                               lambda x: x if x != '' else None)
-        return hydrate_func(value)
-
-    def _serialize(self, key, value):
-        dehydrate_func = getattr(self, 'serialize_{0}'.format(key),
-                                 lambda x: x if x is not None else '')
-        return dehydrate_func(value)
-
-    def _normalize(self, key, value):
-        """
-        Use normalize_ methods to normalize user input. Any user
-        input will be normalized at the moment it is used as filter,
-        or entered as a value of Task attribute.
-        """
-
-        # None value should not be converted by normalizer
-        if value is None:
-            return None
-
-        normalize_func = getattr(self, 'normalize_{0}'.format(key),
-                                 lambda x: x)
-
-        return normalize_func(value)
-
-    def timestamp_serializer(self, date):
-        if not date:
-            return ''
-
-        # Any serialized timestamp should be localized, we need to
-        # convert to UTC before converting to string (DATE_FORMAT uses UTC)
-        date = date.astimezone(pytz.utc)
-
-        return date.strftime(DATE_FORMAT)
-
-    def timestamp_deserializer(self, date_str):
-        if not date_str:
-            return None
-
-        # Return timestamp localized in the local zone
-        naive_timestamp = datetime.datetime.strptime(date_str, DATE_FORMAT)
-        localized_timestamp = pytz.utc.localize(naive_timestamp)
-        return localized_timestamp.astimezone(local_zone)
-
-    def serialize_entry(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_entry(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_entry(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_modified(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_modified(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_modified(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_due(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_due(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_due(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_scheduled(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_scheduled(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_scheduled(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_until(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_until(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_until(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_wait(self, value):
-        return self.timestamp_serializer(value)
-
-    def deserialize_wait(self, value):
-        return self.timestamp_deserializer(value)
-
-    def normalize_wait(self, value):
-        return self.datetime_normalizer(value)
-
-    def serialize_annotations(self, value):
-        value = value if value is not None else []
-
-        # This may seem weird, but it's correct, we want to export
-        # a list of dicts as serialized value
-        serialized_annotations = [json.loads(annotation.export_data())
-                                  for annotation in value]
-        return serialized_annotations if serialized_annotations else ''
-
-    def deserialize_annotations(self, data):
-        return [TaskAnnotation(self, d) for d in data] if data else []
-
-    def serialize_tags(self, tags):
-        return ','.join(tags) if tags else ''
-
-    def deserialize_tags(self, tags):
-        if isinstance(tags, six.string_types):
-            return tags.split(',') if tags else []
-        return tags or []
-
-    def serialize_depends(self, value):
-        # Return the list of uuids
-        value = value if value is not None else set()
-        return ','.join(task['uuid'] for task in value)
-
-    def deserialize_depends(self, raw_uuids):
-        raw_uuids = raw_uuids or ''  # Convert None to empty string
-        uuids = raw_uuids.split(',')
-        return set(self.warrior.tasks.get(uuid=uuid) for uuid in uuids if uuid)
-
-    def datetime_normalizer(self, value):
-        """
-        Normalizes date/datetime value (considered to come from user input)
-        to localized datetime value. Following conversions happen:
-
-        naive date -> localized datetime with the same date, and time=midnight
-        naive datetime -> localized datetime with the same value
-        localized datetime -> localized datetime (no conversion)
-        """
-
-        if (isinstance(value, datetime.date)
-            and not isinstance(value, datetime.datetime)):
-            # Convert to local midnight
-            value_full = datetime.datetime.combine(value, datetime.time.min)
-            localized = local_zone.localize(value_full)
-        elif isinstance(value, datetime.datetime) and value.tzinfo is None:
-            # Convert to localized datetime object
-            localized = local_zone.localize(value)
-        else:
-            # If the value is already localized, there is no need to change
-            # time zone at this point. Also None is a valid value too.
-            localized = value
-
-        return localized
-
-    def normalize_uuid(self, value):
-        # Enforce sane UUID
-        if not isinstance(value, six.string_types) or value == '':
-            raise ValueError("UUID must be a valid non-empty string, "
-                             "not: {}".format(value))
-
-        return value
-
-
 class TaskResource(SerializingObject):
     read_only_fields = []
 
@@ -259,7 +66,7 @@ class TaskResource(SerializingObject):
         # are not propagated.
         self._original_data = copy.deepcopy(self._data)
 
-    def _update_data(self, data, update_original=False):
+    def _update_data(self, data, update_original=False, remove_missing=False):
         """
         Low level update of the internal _data dict. Data which are coming
         as updates should already be serialized. If update_original is True, the
@@ -268,10 +75,14 @@ class TaskResource(SerializingObject):
         self._data.update(dict((key, self._deserialize(key, value))
                                for key, value in data.items()))
 
+        # In certain situations, we want to treat missing keys as removals
+        if remove_missing:
+            for key in set(self._data.keys()) - set(data.keys()):
+                self._data[key] = None
+
         if update_original:
             self._original_data = copy.deepcopy(self._data)
 
-
     def __getitem__(self, key):
         # This is a workaround to make TaskResource non-iterable
         # over simple index-based iteration
@@ -316,7 +127,7 @@ class TaskResource(SerializingObject):
         # to pass that to TaskWarrior.
         data_tuples = filter(lambda t: t[1] is not '', data_tuples)
         data = dict(data_tuples)
-        return json.dumps(data, separators=(',',':'))
+        return json.dumps(data, separators=(',', ':'))
 
     @property
     def _modified_fields(self):
@@ -341,9 +152,10 @@ class TaskResource(SerializingObject):
 class TaskAnnotation(TaskResource):
     read_only_fields = ['entry', 'description']
 
-    def __init__(self, task, data={}):
+    def __init__(self, task, data=None):
         self.task = task
-        self._load_data(data)
+        self._load_data(data or dict())
+        super(TaskAnnotation, self).__init__(task.backend)
 
     def remove(self):
         self.task.remove_annotation(self)
@@ -356,6 +168,9 @@ class TaskAnnotation(TaskResource):
         # their data dics are the same
         return self.task == other.task and self._data == other._data
 
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
     __repr__ = __unicode__
 
 
@@ -377,6 +192,18 @@ class Task(TaskResource):
         """
         pass
 
+    class ActiveTask(Exception):
+        """
+        Raised when the operation cannot be performed on the active task.
+        """
+        pass
+
+    class InactiveTask(Exception):
+        """
+        Raised when the operation cannot be performed on an inactive task.
+        """
+        pass
+
     class NotSaved(Exception):
         """
         Raised when the operation cannot be performed on the task, because
@@ -385,7 +212,7 @@ class Task(TaskResource):
         pass
 
     @classmethod
-    def from_input(cls, input_file=sys.stdin, modify=None, warrior=None):
+    def from_input(cls, input_file=sys.stdin, modify=None, backend=None):
         """
         Creates a Task object, directly from the stdin, by reading one line.
         If modify=True, two lines are used, first line interpreted as the
@@ -406,12 +233,13 @@ class Task(TaskResource):
         modify = name.startswith('on-modify') if modify is None else modify
 
         # Create the TaskWarrior instance if none passed
-        if warrior is None:
+        if backend is None:
+            backends = importlib.import_module('tasklib.backends')
             hook_parent_dir = os.path.dirname(os.path.dirname(sys.argv[0]))
-            warrior = TaskWarrior(data_location=hook_parent_dir)
+            backend = backends.TaskWarrior(data_location=hook_parent_dir)
 
         # TaskWarrior instance is set to None
-        task = cls(warrior)
+        task = cls(backend)
 
         # Load the data from the input
         task._load_data(json.loads(input_file.readline().strip()))
@@ -419,12 +247,13 @@ class Task(TaskResource):
         # If this is a on-modify event, we are provided with additional
         # line of input, which provides updated data
         if modify:
-            task._update_data(json.loads(input_file.readline().strip()))
+            task._update_data(json.loads(input_file.readline().strip()),
+                              remove_missing=True)
 
         return task
 
-    def __init__(self, warrior, **kwargs):
-        self.warrior = warrior
+    def __init__(self, backend, **kwargs):
+        super(Task, self).__init__(backend)
 
         # Check that user is not able to set read-only value in __init__
         for key in kwargs.keys():
@@ -454,6 +283,8 @@ class Task(TaskResource):
             # If the tasks are not saved, compare the actual instances
             return id(self) == id(other)
 
+    def __ne__(self, other):
+        return not self.__eq__(other)
 
     def __hash__(self):
         if self['uuid']:
@@ -479,6 +310,14 @@ class Task(TaskResource):
     def pending(self):
         return self['status'] == six.text_type('pending')
 
+    @property
+    def recurring(self):
+        return self['status'] == six.text_type('recurring')
+
+    @property
+    def active(self):
+        return self['start'] is not None
+
    @property
    def saved(self):
        return self['uuid'] is not None or self['id'] is not None
@@ -492,33 +331,6 @@ class Task(TaskResource):
 
         return super(Task, self).serialize_depends(cur_dependencies)
 
-    def format_depends(self):
-        # We need to generate added and removed dependencies list,
-        # since Taskwarrior does not accept redefining dependencies.
-
-        # This cannot be part of serialize_depends, since we need
-        # to keep a list of all depedencies in the _data dictionary,
-        # not just currently added/removed ones
-
-        old_dependencies = self._original_data.get('depends', set())
-
-        added = self['depends'] - old_dependencies
-        removed = old_dependencies - self['depends']
-
-        # Removed dependencies need to be prefixed with '-'
-        return 'depends:' + ','.join(
-            [t['uuid'] for t in added] +
-            ['-' + t['uuid'] for t in removed]
-        )
-
-    def format_description(self):
-        # Task version older than 2.4.0 ignores first word of the
-        # task description if description: prefix is used
-        if self.warrior.version < VERSION_2_4_0:
-            return self._data['description']
-        else:
-            return "description:'{0}'".format(self._data['description'] or '')
-
     def delete(self):
         if not self.saved:
             raise Task.NotSaved("Task needs to be saved before it can be deleted")
 
@@ -529,11 +341,44 @@ class Task(TaskResource):
         if self.deleted:
             raise Task.DeletedTask("Task was already deleted")
 
-        self.warrior.execute_command([self['uuid'], 'delete'])
+        self.backend.delete_task(self)
 
         # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status', 'start', 'end'])
+
+    def start(self):
+        if not self.saved:
+            raise Task.NotSaved("Task needs to be saved before it can be started")
+
+        # Refresh, and raise exception if task is already completed/deleted
        self.refresh(only_fields=['status'])
 
+        if self.completed:
+            raise Task.CompletedTask("Cannot start a completed task")
+        elif self.deleted:
+            raise Task.DeletedTask("Deleted task cannot be started")
+        elif self.active:
+            raise Task.ActiveTask("Task is already active")
+
+        self.backend.start_task(self)
+
+        # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status', 'start'])
+
+    def stop(self):
+        if not self.saved:
+            raise Task.NotSaved("Task needs to be saved before it can be stopped")
+
+        # Refresh, and raise exception if task is already completed/deleted
+        self.refresh(only_fields=['status'])
+
+        if not self.active:
+            raise Task.InactiveTask("Cannot stop an inactive task")
+
+        self.backend.stop_task(self)
+
+        # Refresh the status again, so that we have updated info stored
+        self.refresh(only_fields=['status', 'start'])
 
     def done(self):
         if not self.saved:
@@ -547,43 +392,23 @@ class Task(TaskResource):
         elif self.deleted:
             raise Task.DeletedTask("Deleted task cannot be completed")
 
-        self.warrior.execute_command([self['uuid'], 'done'])
+        self.backend.complete_task(self)
 
         # Refresh the status again, so that we have updated info stored
-        self.refresh(only_fields=['status'])
+        self.refresh(only_fields=['status', 'start', 'end'])
 
     def save(self):
         if self.saved and not self.modified:
             return
 
-        args = [self['uuid'], 'modify'] if self.saved else ['add']
-        args.extend(self._get_modified_fields_as_args())
-        output = self.warrior.execute_command(args)
-
-        # Parse out the new ID, if the task is being added for the first time
-        if not self.saved:
-            id_lines = [l for l in output if l.startswith('Created task ')]
-
-            # Complain loudly if it seems that more tasks were created
-            # Should not happen
-            if len(id_lines) != 1 or len(id_lines[0].split(' ')) != 3:
-                raise TaskWarriorException("Unexpected output when creating "
-                                           "task: %s" % '\n'.join(id_lines))
-
-            # Circumvent the ID storage, since ID is considered read-only
-            self._data['id'] = int(id_lines[0].split(' ')[2].rstrip('.'))
-
-        # Refreshing is very important here, as not only modification time
-        # is updated, but arbitrary attribute may have changed due hooks
-        # altering the data before saving
-        self.refresh()
+        # All the actual work is done by the backend
+        self.backend.save_task(self)
 
     def add_annotation(self, annotation):
         if not self.saved:
             raise Task.NotSaved("Task needs to be saved to add annotation")
 
-        args = [self['uuid'], 'annotate', annotation]
-        self.warrior.execute_command(args)
+        self.backend.annotate_task(self, annotation)
         self.refresh(only_fields=['annotations'])
 
     def remove_annotation(self, annotation):
@@ -592,56 +417,17 @@ class Task(TaskResource):
         if isinstance(annotation, TaskAnnotation):
             annotation = annotation['description']
 
-        args = [self['uuid'], 'denotate', annotation]
-        self.warrior.execute_command(args)
-        self.refresh(only_fields=['annotations'])
 
-    def _get_modified_fields_as_args(self):
-        args = []
-
-        def add_field(field):
-            # Add the output of format_field method to args list (defaults to
-            # field:value)
-            serialized_value = self._serialize(field, self._data[field])
-
-            # Empty values should not be enclosed in quotation marks, see
-            # TW-1510
-            if serialized_value is '':
-                escaped_serialized_value = ''
-            else:
-                escaped_serialized_value = "'{0}'".format(serialized_value)
-
-            format_default = lambda: "{0}:{1}".format(field,
-                                                      escaped_serialized_value)
-
-            format_func = getattr(self, 'format_{0}'.format(field),
-                                  format_default)
-
-            args.append(format_func())
-
-        # If we're modifying saved task, simply pass on all modified fields
-        if self.saved:
-            for field in self._modified_fields:
-                add_field(field)
-        # For new tasks, pass all fields that make sense
-        else:
-            for field in self._data.keys():
-                if field in self.read_only_fields:
-                    continue
-                add_field(field)
-
-        return args
+        self.backend.denotate_task(self, annotation)
+        self.refresh(only_fields=['annotations'])
 
-    def refresh(self, only_fields=[]):
+    def refresh(self, only_fields=None, after_save=False):
         # Raise error when trying to refresh a task that has not been saved
         if not self.saved:
             raise Task.NotSaved("Task needs to be saved to be refreshed")
 
-        # We need to use ID as backup for uuid here for the refreshes
-        # of newly saved tasks. Any other place in the code is fine
-        # with using UUID only.
-        args = [self['uuid'] or self['id'], 'export']
-        new_data = json.loads(self.warrior.execute_command(args)[0])
+        new_data = self.backend.refresh_task(self, after_save=after_save)
+
         if only_fields:
             to_update = dict(
                 [(k, new_data.get(k)) for k in only_fields])
@@ -649,67 +435,22 @@ class Task(TaskResource):
         else:
             self._load_data(new_data)
 
 
-class TaskFilter(SerializingObject):
-    """
-    A set of parameters to filter the task list with.
-    """
-
-    def __init__(self, filter_params=[]):
-        self.filter_params = filter_params
-
-    def add_filter(self, filter_str):
-        self.filter_params.append(filter_str)
-
-    def add_filter_param(self, key, value):
-        key = key.replace('__', '.')
-
-        # Replace the value with empty string, since that is the
-        # convention in TW for empty values
-        attribute_key = key.split('.')[0]
-
-        # Since this is user input, we need to normalize before we serialize
-        value = self._normalize(key, value)
-        value = self._serialize(attribute_key, value)
-
-        # If we are filtering by uuid:, do not use uuid keyword
-        # due to TW-1452 bug
-        if key == 'uuid':
-            self.filter_params.insert(0, value)
-        else:
-            # Surround value with aphostrophes unless it's a empty string
-            value = "'%s'" % value if value else ''
-
-            # We enforce equality match by using 'is' (or 'none') modifier
-            # Without using this syntax, filter fails due to TW-1479
-            modifier = '.is' if value else '.none'
-            key = key + modifier if '.' not in key else key
-
-        self.filter_params.append("{0}:{1}".format(key, value))
-
-    def get_filter_params(self):
-        return [f for f in self.filter_params if f]
-
-    def clone(self):
-        c = self.__class__()
-        c.filter_params = list(self.filter_params)
-        return c
-
 class TaskQuerySet(object):
     """
     Represents a lazy lookup for a task objects.
     """
 
-    def __init__(self, warrior=None, filter_obj=None):
-        self.warrior = warrior
+    def __init__(self, backend, filter_obj=None):
+        self.backend = backend
         self._result_cache = None
-        self.filter_obj = filter_obj or TaskFilter()
+        self.filter_obj = filter_obj or self.backend.filter_class(backend)
 
     def __deepcopy__(self, memo):
        """
        Deep copy of a QuerySet doesn't populate the cache
        """
-        obj = self.__class__()
+        obj = self.__class__(backend=self.backend)
        for k, v in self.__dict__.items():
            if k in ('_iter', '_result_cache'):
                obj.__dict__[k] = None
@@ -754,7 +495,7 @@ class TaskQuerySet(object):
         if klass is None:
             klass = self.__class__
         filter_obj = self.filter_obj.clone()
-        c = klass(warrior=self.warrior, filter_obj=filter_obj)
+        c = klass(backend=self.backend, filter_obj=filter_obj)
         c.__dict__.update(kwargs)
         return c
 
@@ -762,7 +503,7 @@ class TaskQuerySet(object):
         """
         Fetch the tasks which match the current filters.
         """
-        return self.warrior.filter_tasks(self.filter_obj)
+        return self.backend.filter_tasks(self.filter_obj)
 
     def all(self):
         """
@@ -776,6 +517,15 @@ class TaskQuerySet(object):
     def completed(self):
         return self.filter(status=COMPLETED)
 
+    def deleted(self):
+        return self.filter(status=DELETED)
+
+    def waiting(self):
+        return self.filter(status=WAITING)
+
+    def recurring(self):
+        return self.filter(status=RECURRING)
+
     def filter(self, *args, **kwargs):
         """
         Returns a new TaskQuerySet with the given filters added.
@@ -803,81 +553,3 @@ class TaskQuerySet(object):
             raise ValueError(
                 'get() returned more than one Task -- it returned {0}! '
                 'Lookup parameters were {1}'.format(num, kwargs))
-
-
-class TaskWarrior(object):
-    def __init__(self, data_location='~/.task', create=True):
-        data_location = os.path.expanduser(data_location)
-        if create and not os.path.exists(data_location):
-            os.makedirs(data_location)
-        self.config = {
-            'data.location': os.path.expanduser(data_location),
-            'confirmation': 'no',
-            'dependency.confirmation': 'no',  # See TW-1483 or taskrc man page
-            'recurrence.confirmation': 'no',  # Necessary for modifying R tasks
-        }
-        self.tasks = TaskQuerySet(self)
-        self.version = self._get_version()
-
-    def _get_command_args(self, args, config_override={}):
-        command_args = ['task', 'rc:/']
-        config = self.config.copy()
-        config.update(config_override)
-        for item in config.items():
-            command_args.append('rc.{0}={1}'.format(*item))
-        command_args.extend(map(str, args))
-        return command_args
-
-    def _get_version(self):
-        p = subprocess.Popen(
-            ['task', '--version'],
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE)
-        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
-        return stdout.strip('\n')
-
-    def execute_command(self, args, config_override={}, allow_failure=True):
-        command_args = self._get_command_args(
-            args, config_override=config_override)
-        logger.debug(' '.join(command_args))
-        p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
-                             stderr=subprocess.PIPE)
-        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
-        if p.returncode and allow_failure:
-            if stderr.strip():
-                error_msg = stderr.strip().splitlines()[-1]
-            else:
-                error_msg = stdout.strip()
-            raise TaskWarriorException(error_msg)
-        return stdout.strip().split('\n')
-
-    def enforce_recurrence(self):
-        # Run arbitrary report command which will trigger generation
-        # of recurrent tasks.
-        # TODO: Make a version dependant enforcement once
-        # TW-1531 is handled
-        self.execute_command(['next'], allow_failure=False)
-
-    def filter_tasks(self, filter_obj):
-        self.enforce_recurrence()
-        args = ['export', '--'] + filter_obj.get_filter_params()
-        tasks = []
-        for line in self.execute_command(args):
-            if line:
-                data = line.strip(',')
-                try:
-                    filtered_task = Task(self)
-                    filtered_task._load_data(json.loads(data))
-                    tasks.append(filtered_task)
-                except ValueError:
-                    raise TaskWarriorException('Invalid JSON: %s' % data)
-        return tasks
-
-    def merge_with(self, path, push=False):
-        path = path.rstrip('/') + '/'
-        self.execute_command(['merge', path], config_override={
-            'merge.autopush': 'yes' if push else 'no',
-        })
-
-    def undo(self):
-        self.execute_command(['undo'])
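
Usage sketch (Python, not part of the diff above): this change removes the in-module TaskWarrior and TaskFilter classes in favour of a pluggable backend, renames the warrior argument to backend, and adds Task.start()/Task.stop(), the active property, and the deleted()/waiting()/recurring() queryset shortcuts. The following minimal example is a sketch of the resulting API; it assumes the tasklib.backends.TaskWarrior backend keeps a tasks queryset attribute the way the removed class did, and the data location and description are purely illustrative.

    from tasklib.backends import TaskWarrior
    from tasklib.task import Task

    # Instantiate the TaskWarrior backend; '~/.task' is an illustrative path.
    tw = TaskWarrior(data_location='~/.task')

    # Create a task against the backend and persist it; after this change,
    # Task.save() delegates the actual work to backend.save_task().
    task = Task(tw, description='Review the backend refactoring')
    task.save()

    # New in this diff: the start/stop lifecycle and the `active` property.
    task.start()
    assert task.active
    task.stop()
    task.done()

    # New TaskQuerySet shortcuts added by this diff.
    deleted_tasks = tw.tasks.deleted()
    waiting_tasks = tw.tasks.waiting()
    recurring_tasks = tw.tasks.recurring()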
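
Hook usage sketch: Task.from_input() now accepts a backend argument and, for on-modify events, applies the second input line with remove_missing=True, so attributes absent from the modified line are cleared on the local object as well. A hedged sketch of an on-modify hook script built on it follows; the priority rule is invented for illustration only.

    #!/usr/bin/env python
    import sys

    from tasklib.task import Task

    # from_input() reads one JSON line for on-add hooks and two for on-modify
    # hooks (detected from the script name unless modify= is passed).
    task = Task.from_input()

    # Invented rule, for illustration only: default the priority when unset.
    if not task['priority']:
        task['priority'] = 'M'

    # A hook must echo the (possibly modified) task back to Taskwarrior.
    print(task.export_data())
    sys.exit(0)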