import json
import logging
import os
+import pytz
import six
import sys
import subprocess
+import tzlocal
DATE_FORMAT = '%Y%m%dT%H%M%SZ'
REPR_OUTPUT_SIZE = 10
VERSION_2_2_0 = six.u('2.2.0')
VERSION_2_3_0 = six.u('2.3.0')
VERSION_2_4_0 = six.u('2.4.0')
+VERSION_2_4_1 = six.u('2.4.1')
+VERSION_2_4_2 = six.u('2.4.2')
logger = logging.getLogger(__name__)
+local_zone = tzlocal.get_localzone()
class TaskWarriorException(Exception):
pass
+class ReadOnlyDictView(object):
+ """
+    Provides a simplified read-only view of a dict object.
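+
+    Illustrative usage (hypothetical values, a sketch of the intended
+    read-only behaviour):
+
+        >>> view = ReadOnlyDictView({'project': 'Home'})
+        >>> view['project']
+        'Home'
+        >>> 'project' in view
+        True
+        >>> len(view)
+        1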
+ """
+
+ def __init__(self, viewed_dict):
+ self.viewed_dict = viewed_dict
+
+ def __getitem__(self, key):
+ return copy.deepcopy(self.viewed_dict.__getitem__(key))
+
+ def __contains__(self, k):
+ return self.viewed_dict.__contains__(k)
+
+ def __iter__(self):
+ for value in self.viewed_dict:
+ yield copy.deepcopy(value)
+
+ def __len__(self):
+ return len(self.viewed_dict)
+
+ def get(self, key, default=None):
+ return copy.deepcopy(self.viewed_dict.get(key, default))
+
+ def items(self):
+ return [copy.deepcopy(v) for v in self.viewed_dict.items()]
+
+ def values(self):
+ return [copy.deepcopy(v) for v in self.viewed_dict.values()]
+
+
class SerializingObject(object):
"""
Common ancestor for TaskResource & TaskFilter, since they both
need to serialize arguments.
+
+    Serializing methods should hold the following contract:
+      - any empty value (meaning removal of the attribute)
+        is serialized into an empty string
+      - None denotes an empty value for any attribute
+
+    Deserializing methods should hold the following contract:
+      - None denotes an empty value for any non-iterable attribute
+        (this is a safeguard; TaskWarrior currently does not export
+        empty-valued attributes); for iterable attributes (e.g. list
+        or set), an empty iterable should be used instead
+
+    Normalizing methods should hold the following contract:
+      - They are used to validate and normalize the user input.
+        Any attribute value that comes from the user (during Task
+        initialization, assigning values to Task attributes, or
+        filtering by user-provided values of attributes) is first
+        validated and normalized using the normalize_{key} method.
+      - If validation or normalization fails, the normalizer is
+        expected to raise ValueError.
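+
+    For example (hypothetical attribute values), serialize_due(None)
+    yields '' and deserialize_due('') yields None, matching the
+    contracts above.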
"""
def _deserialize(self, key, value):
lambda x: x if x is not None else '')
return dehydrate_func(value)
+ def _normalize(self, key, value):
+ """
+        Use normalize_<key> methods to normalize user input. Any user
+        input is normalized at the moment it is used as a filter or
+        assigned as a value of a Task attribute.
+ """
+
+ # None value should not be converted by normalizer
+ if value is None:
+ return None
+
+ normalize_func = getattr(self, 'normalize_{0}'.format(key),
+ lambda x: x)
+
+ return normalize_func(value)
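+
+    # Illustrative dispatch (hypothetical values): _normalize('due', date(2015, 2, 2))
+    # calls normalize_due, while _normalize('description', 'foo') falls back
+    # to the identity lambda and returns 'foo' unchanged.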
+
def timestamp_serializer(self, date):
if not date:
- return None
+ return ''
+
+        # Any serialized timestamp should be localized; we need to
+        # convert it to UTC before formatting, since DATE_FORMAT uses UTC
+ date = date.astimezone(pytz.utc)
+
return date.strftime(DATE_FORMAT)
def timestamp_deserializer(self, date_str):
if not date_str:
return None
- return datetime.datetime.strptime(date_str, DATE_FORMAT)
+
+ # Return timestamp localized in the local zone
+ naive_timestamp = datetime.datetime.strptime(date_str, DATE_FORMAT)
+ localized_timestamp = pytz.utc.localize(naive_timestamp)
+ return localized_timestamp.astimezone(local_zone)
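+
+    # Illustrative roundtrip (assuming the local zone is UTC):
+    #   timestamp_serializer(datetime(2015, 2, 2, tzinfo=pytz.utc)) == '20150202T000000Z'
+    #   timestamp_deserializer('20150202T000000Z') == datetime(2015, 2, 2, tzinfo=pytz.utc)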
def serialize_entry(self, value):
return self.timestamp_serializer(value)
def deserialize_entry(self, value):
return self.timestamp_deserializer(value)
+ def normalize_entry(self, value):
+ return self.datetime_normalizer(value)
+
def serialize_modified(self, value):
return self.timestamp_serializer(value)
def deserialize_modified(self, value):
return self.timestamp_deserializer(value)
+ def normalize_modified(self, value):
+ return self.datetime_normalizer(value)
+
+ def serialize_start(self, value):
+ return self.timestamp_serializer(value)
+
+ def deserialize_start(self, value):
+ return self.timestamp_deserializer(value)
+
+ def normalize_start(self, value):
+ return self.datetime_normalizer(value)
+
+ def serialize_end(self, value):
+ return self.timestamp_serializer(value)
+
+ def deserialize_end(self, value):
+ return self.timestamp_deserializer(value)
+
+ def normalize_end(self, value):
+ return self.datetime_normalizer(value)
+
def serialize_due(self, value):
return self.timestamp_serializer(value)
def deserialize_due(self, value):
return self.timestamp_deserializer(value)
+ def normalize_due(self, value):
+ return self.datetime_normalizer(value)
+
def serialize_scheduled(self, value):
return self.timestamp_serializer(value)
def deserialize_scheduled(self, value):
return self.timestamp_deserializer(value)
+ def normalize_scheduled(self, value):
+ return self.datetime_normalizer(value)
+
def serialize_until(self, value):
return self.timestamp_serializer(value)
def deserialize_until(self, value):
return self.timestamp_deserializer(value)
+ def normalize_until(self, value):
+ return self.datetime_normalizer(value)
+
def serialize_wait(self, value):
return self.timestamp_serializer(value)
def deserialize_wait(self, value):
return self.timestamp_deserializer(value)
+ def normalize_wait(self, value):
+ return self.datetime_normalizer(value)
+
+ def serialize_annotations(self, value):
+ value = value if value is not None else []
+
+        # This may seem weird, but it's correct: we want to export
+        # a list of dicts as the serialized value
+ serialized_annotations = [json.loads(annotation.export_data())
+ for annotation in value]
+ return serialized_annotations if serialized_annotations else ''
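+
+    # The serialized form is a list of dicts, e.g. (hypothetical annotation)
+    # [{"entry": "20150202T000000Z", "description": "some note"}];
+    # an empty list of annotations serializes to '' per the contract above.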
+
def deserialize_annotations(self, data):
return [TaskAnnotation(self, d) for d in data] if data else []
return tags.split(',') if tags else []
return tags or []
- def serialize_depends(self, cur_dependencies):
+ def serialize_depends(self, value):
# Return the list of uuids
- return ','.join(task['uuid'] for task in cur_dependencies)
+ value = value if value is not None else set()
+ return ','.join(task['uuid'] for task in value)
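+
+    # e.g. a set of two saved tasks serializes to 'uuid-1,uuid-2'
+    # (hypothetical uuids; the ordering of a set is not guaranteed)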
def deserialize_depends(self, raw_uuids):
raw_uuids = raw_uuids or '' # Convert None to empty string
uuids = raw_uuids.split(',')
return set(self.warrior.tasks.get(uuid=uuid) for uuid in uuids if uuid)
+ def datetime_normalizer(self, value):
+ """
+        Normalizes a date/datetime value (assumed to come from user input)
+        into a localized datetime value. The following conversions happen:
+
+ naive date -> localized datetime with the same date, and time=midnight
+ naive datetime -> localized datetime with the same value
+ localized datetime -> localized datetime (no conversion)
+ """
+
+ if (isinstance(value, datetime.date)
+ and not isinstance(value, datetime.datetime)):
+ # Convert to local midnight
+ value_full = datetime.datetime.combine(value, datetime.time.min)
+ localized = local_zone.localize(value_full)
+ elif isinstance(value, datetime.datetime) and value.tzinfo is None:
+ # Convert to localized datetime object
+ localized = local_zone.localize(value)
+ else:
+            # If the value is already localized, there is no need to change
+            # the time zone at this point. None is also a valid value here.
+ localized = value
+
+ return localized
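+
+    # Illustrative conversions (assuming the local zone is Europe/Prague, UTC+1 in winter):
+    #   date(2015, 2, 2)            -> 2015-02-02 00:00:00+01:00
+    #   datetime(2015, 2, 2, 10, 0) -> 2015-02-02 10:00:00+01:00
+    #   already localized datetime  -> returned unchanged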
+
+ def normalize_uuid(self, value):
+ # Enforce sane UUID
+ if not isinstance(value, six.string_types) or value == '':
+ raise ValueError("UUID must be a valid non-empty string, "
+ "not: {}".format(value))
+
+ return value
+
class TaskResource(SerializingObject):
read_only_fields = []
def __setitem__(self, key, value):
if key in self.read_only_fields:
raise RuntimeError('Field \'%s\' is read-only' % key)
+
+ # Normalize the user input before saving it
+ value = self._normalize(key, value)
self._data[key] = value
def __str__(self):
def __repr__(self):
return str(self)
+ def export_data(self):
+ """
+ Exports current data contained in the Task as JSON
+ """
+
+        # We need to remove spaces (see TW-1504), so use custom separators
+ data_tuples = ((key, self._serialize(key, value))
+ for key, value in six.iteritems(self._data))
+
+        # An empty string denotes an empty serialized value; we do not
+        # want to pass that to TaskWarrior.
+        data_tuples = filter(lambda t: t[1] != '', data_tuples)
+ data = dict(data_tuples)
+ return json.dumps(data, separators=(',',':'))
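+
+    # Illustrative output for a task with hypothetical data:
+    #   '{"description":"Buy milk","status":"pending"}'
+    # Note the custom separators: no spaces after ',' or ':' (TW-1504).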
+
+ @property
+ def _modified_fields(self):
+ writable_fields = set(self._data.keys()) - set(self.read_only_fields)
+ for key in writable_fields:
+ new_value = self._data.get(key)
+ old_value = self._original_data.get(key)
+
+ # Make sure not to mark data removal as modified field if the
+ # field originally had some empty value
+ if key in self._data and not new_value and not old_value:
+ continue
+
+ if new_value != old_value:
+ yield key
+
+ @property
+ def modified(self):
+ return bool(list(self._modified_fields))
+
class TaskAnnotation(TaskResource):
read_only_fields = ['entry', 'description']
pass
@classmethod
- def from_input(cls, input_file=sys.stdin, modify=None):
+ def from_input(cls, input_file=sys.stdin, modify=None, warrior=None):
"""
Creates a Task object, directly from the stdin, by reading one line.
If modify=True, two lines are used, first line interpreted as the
but defaults to sys.stdin.
"""
- # TaskWarrior instance is set to None
- task = cls(None)
-
# Detect the hook type if not given directly
name = os.path.basename(sys.argv[0])
modify = name.startswith('on-modify') if modify is None else modify
+ # Create the TaskWarrior instance if none passed
+ if warrior is None:
+ hook_parent_dir = os.path.dirname(os.path.dirname(sys.argv[0]))
+ warrior = TaskWarrior(data_location=hook_parent_dir)
+
+        # Create the task with the detected or provided TaskWarrior instance
+ task = cls(warrior)
+
# Load the data from the input
task._load_data(json.loads(input_file.readline().strip()))
# __init__ methods, that would be confusing
        # Rather unfortunate syntax due to python2.6 compatibility
- self._load_data(dict((key, self._serialize(key, value))
- for (key, value) in six.iteritems(kwargs)))
+ self._data = dict((key, self._normalize(key, value))
+ for (key, value) in six.iteritems(kwargs))
+ self._original_data = copy.deepcopy(self._data)
+
+ # Provide read only access to the original data
+ self.original = ReadOnlyDictView(self._original_data)
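+
+        # 'original' reflects the data as of initialization (or the last
+        # load from TaskWarrior); later task['...'] assignments only touch
+        # self._data, so the view stays unchanged until the next refresh.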
def __unicode__(self):
return self['description']
# If the tasks are not saved, return hash of instance id
return id(self).__hash__()
- @property
- def _modified_fields(self):
- writable_fields = set(self._data.keys()) - set(self.read_only_fields)
- for key in writable_fields:
- new_value = self._data.get(key)
- old_value = self._original_data.get(key)
-
- # Make sure not to mark data removal as modified field if the
- # field originally had some empty value
- if key in self._data and not new_value and not old_value:
- continue
-
- if new_value != old_value:
- yield key
-
- @property
- def modified(self):
- return bool(list(self._modified_fields))
-
@property
def completed(self):
return self['status'] == six.text_type('completed')
def serialize_depends(self, cur_dependencies):
# Check that all the tasks are saved
- for task in cur_dependencies:
+ for task in (cur_dependencies or set()):
if not task.saved:
raise Task.NotSaved('Task \'%s\' needs to be saved before '
'it can be set as dependency.' % task)
self.warrior.execute_command([self['uuid'], 'delete'])
# Refresh the status again, so that we have updated info stored
+ self.refresh(only_fields=['status', 'start', 'end'])
+
+ def start(self):
+ if not self.saved:
+ raise Task.NotSaved("Task needs to be saved before it can be started")
+
+ # Refresh, and raise exception if task is already completed/deleted
self.refresh(only_fields=['status'])
+ if self.completed:
+ raise Task.CompletedTask("Cannot start a completed task")
+ elif self.deleted:
+ raise Task.DeletedTask("Deleted task cannot be started")
+
+ self.warrior.execute_command([self['uuid'], 'start'])
+
+ # Refresh the status again, so that we have updated info stored
+ self.refresh(only_fields=['status', 'start'])
def done(self):
if not self.saved:
self.warrior.execute_command([self['uuid'], 'done'])
# Refresh the status again, so that we have updated info stored
- self.refresh(only_fields=['status'])
+ self.refresh(only_fields=['status', 'start', 'end'])
def save(self):
if self.saved and not self.modified:
else:
self._load_data(new_data)
- def export_data(self):
- """
- Exports current data contained in the Task as JSON
- """
-
- # We need to remove spaces for TW-1504, use custom separators
- data_tuples = ((key, self._serialize(key, value))
- for key, value in six.iteritems(self._data))
-
- # Empty string denotes empty serialized value, we do not want
- # to pass that to TaskWarrior.
- data_tuples = filter(lambda t: t[1] is not '', data_tuples)
- data = dict(data_tuples)
- return json.dumps(data, separators=(',',':'))
-
class TaskFilter(SerializingObject):
"""
A set of parameters to filter the task list with.
# Replace the value with empty string, since that is the
# convention in TW for empty values
attribute_key = key.split('.')[0]
+
+ # Since this is user input, we need to normalize before we serialize
+ value = self._normalize(attribute_key, value)
value = self._serialize(attribute_key, value)
# If we are filtering by uuid:, do not use uuid keyword
class TaskWarrior(object):
- def __init__(self, data_location='~/.task', create=True):
+ def __init__(self, data_location='~/.task', create=True, taskrc_location='~/.taskrc'):
data_location = os.path.expanduser(data_location)
+ self.taskrc_location = os.path.expanduser(taskrc_location)
+
+        # If the taskrc does not exist, pass '/' so that TaskWarrior uses
+        # its defaults and does not create a dummy .taskrc file
+ if not os.path.exists(self.taskrc_location):
+ self.taskrc_location = '/'
+
if create and not os.path.exists(data_location):
os.makedirs(data_location)
self.config = {
'data.location': os.path.expanduser(data_location),
'confirmation': 'no',
- 'dependency.confirmation': 'no', # See TW-1483 or taskrc man page
+ 'dependency.confirmation': 'no', # See TW-1483 or taskrc man page
+ 'recurrence.confirmation': 'no', # Necessary for modifying R tasks
}
self.tasks = TaskQuerySet(self)
self.version = self._get_version()
def _get_command_args(self, args, config_override={}):
- command_args = ['task', 'rc:/']
+ command_args = ['task', 'rc:{0}'.format(self.taskrc_location)]
config = self.config.copy()
config.update(config_override)
for item in config.items():
stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
return stdout.strip('\n')
- def execute_command(self, args, config_override={}):
+ def execute_command(self, args, config_override={}, allow_failure=True):
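+        # With allow_failure=True (the default), a non-zero return code
+        # raises TaskWarriorException; allow_failure=False swallows the
+        # failure (used by enforce_recurrence below).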
command_args = self._get_command_args(
args, config_override=config_override)
logger.debug(' '.join(command_args))
p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
- if p.returncode:
+ if p.returncode and allow_failure:
if stderr.strip():
- error_msg = stderr.strip().splitlines()[-1]
+ error_msg = stderr.strip()
else:
error_msg = stdout.strip()
raise TaskWarriorException(error_msg)
return stdout.strip().split('\n')
+ def enforce_recurrence(self):
+        # Run an arbitrary report command, which triggers the generation
+        # of recurrent tasks.
+
+ # Only necessary for TW up to 2.4.1, fixed in 2.4.2.
+ if self.version < VERSION_2_4_2:
+ self.execute_command(['next'], allow_failure=False)
+
def filter_tasks(self, filter_obj):
+ self.enforce_recurrence()
args = ['export', '--'] + filter_obj.get_filter_params()
tasks = []
for line in self.execute_command(args):