import logging
import os
import pytz
-import re
import six
import sys
-import subprocess
import tzlocal
+from backends import TaskWarrior, TaskWarriorException, VERSION_2_4_5
+
DATE_FORMAT = '%Y%m%dT%H%M%SZ'
DATE_FORMAT_CALC = '%Y-%m-%dT%H:%M:%S'
REPR_OUTPUT_SIZE = 10
PENDING = 'pending'
COMPLETED = 'completed'
-VERSION_2_1_0 = six.u('2.1.0')
-VERSION_2_2_0 = six.u('2.2.0')
-VERSION_2_3_0 = six.u('2.3.0')
-VERSION_2_4_0 = six.u('2.4.0')
-VERSION_2_4_1 = six.u('2.4.1')
-VERSION_2_4_2 = six.u('2.4.2')
-VERSION_2_4_3 = six.u('2.4.3')
-
logger = logging.getLogger(__name__)
local_zone = tzlocal.get_localzone()
-class TaskWarriorException(Exception):
- pass
-
-
class ReadOnlyDictView(object):
"""
Provides a simplified read-only view of a dict object.
class TaskAnnotation(TaskResource):
read_only_fields = ['entry', 'description']
- def __init__(self, task, data={}):
+ def __init__(self, task, data=None):
self.task = task
- self._load_data(data)
+ self._load_data(data or dict())
super(TaskAnnotation, self).__init__(task.warrior)
def remove(self):
# Refreshing is very important here, as not only is the modification
# time updated, but arbitrary attributes may have changed due to hooks
# altering the data before saving
- self.refresh()
+ self.refresh(after_save=True)
def add_annotation(self, annotation):
if not self.saved:
return args
- def refresh(self, only_fields=[]):
+ def refresh(self, only_fields=None, after_save=False):
# Raise error when trying to refresh a task that has not been saved
if not self.saved:
raise Task.NotSaved("Task needs to be saved to be refreshed")
# of newly saved tasks. Any other place in the code is fine
# with using UUID only.
args = [self['uuid'] or self['id'], 'export']
- new_data = json.loads(self.warrior.execute_command(args)[0])
+ output = self.warrior.execute_command(args)
+
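+ # The export output is treated as valid when it is exactly one line
+ # holding the task serialized as a JSON object.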
+ def valid(output):
+ return len(output) == 1 and output[0].startswith('{')
+
+ # For older TW versions, attempt to uniquely locate the task
+ # using the data we have if it has just been saved.
+ # This can happen when adding a completed task on older TW versions.
+ if (not valid(output) and self.warrior.version < VERSION_2_4_5
+ and after_save):
+
+ # Make a copy, removing ID and UUID. It's most likely invalid
+ # (ID 0) if it failed to match a unique task.
+ data = copy.deepcopy(self._data)
+ data.pop('id', None)
+ data.pop('uuid', None)
+
+ taskfilter = TaskFilter(self.warrior)
+ for key, value in data.items():
+ taskfilter.add_filter_param(key, value)
+
+ output = self.warrior.execute_command(['export', '--'] +
+ taskfilter.get_filter_params())
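+ # For illustration (hypothetical values): a freshly added completed task
+ # would be re-exported with filter params built from its remaining data,
+ # roughly ['description.is:Pay bills', 'status.is:completed', ...],
+ # instead of the ID/UUID that failed to match a unique task.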
+
+ # If the output still does not identify exactly one task, raise an exception
+ if not valid(output):
+ raise TaskWarriorException(
+ "Unique identifiers {0} with description: {1} matches "
+ "multiple tasks: {2}".format(
+ self['uuid'] or self['id'], self['description'], output)
+ )
+
+ new_data = json.loads(output[0])
if only_fields:
to_update = dict(
[(k, new_data.get(k)) for k in only_fields])
A set of parameters to filter the task list with.
"""
- def __init__(self, warrior, filter_params=[]):
- self.filter_params = filter_params
+ def __init__(self, warrior, filter_params=None):
+ self.filter_params = filter_params or []
super(TaskFilter, self).__init__(warrior)
def add_filter(self, filter_str):
# We enforce equality match by using 'is' (or 'none') modifier
# Without using this syntax, filter fails due to TW-1479
- modifier = '.is' if value else '.none'
- key = key + modifier if '.' not in key else key
+ # which is, however, fixed in 2.4.5
+ if self.warrior.version < VERSION_2_4_5:
+ modifier = '.is' if value else '.none'
+ key = key + modifier if '.' not in key else key
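+ # For example, on TaskWarrior older than 2.4.5 a ('status', 'pending')
+ # pair is emitted below as 'status.is:pending'; empty values get the
+ # '.none' modifier instead.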
self.filter_params.append(six.u("{0}:{1}").format(key, value))
raise ValueError(
'get() returned more than one Task -- it returned {0}! '
'Lookup parameters were {1}'.format(num, kwargs))
-
-
-class TaskWarrior(object):
- def __init__(self, data_location=None, create=True, taskrc_location='~/.taskrc'):
- self.taskrc_location = os.path.expanduser(taskrc_location)
-
- # If taskrc does not exist, pass / to use defaults and avoid creating
- # dummy .taskrc file by TaskWarrior
- if not os.path.exists(self.taskrc_location):
- self.taskrc_location = '/'
-
- self.version = self._get_version()
- self.config = {
- 'confirmation': 'no',
- 'dependency.confirmation': 'no', # See TW-1483 or taskrc man page
- 'recurrence.confirmation': 'no', # Necessary for modifying R tasks
-
- # Defaults to on since 2.4.5, we expect off during parsing
- 'json.array': 'off',
-
- # 2.4.3 onwards supports 0 as infite bulk, otherwise set just
- # arbitrary big number which is likely to be large enough
- 'bulk': 0 if self.version >= VERSION_2_4_3 else 100000,
- }
-
- # Set data.location override if passed via kwarg
- if data_location is not None:
- data_location = os.path.expanduser(data_location)
- if create and not os.path.exists(data_location):
- os.makedirs(data_location)
- self.config['data.location'] = data_location
-
- self.tasks = TaskQuerySet(self)
-
- def _get_command_args(self, args, config_override={}):
- command_args = ['task', 'rc:{0}'.format(self.taskrc_location)]
- config = self.config.copy()
- config.update(config_override)
- for item in config.items():
- command_args.append('rc.{0}={1}'.format(*item))
- command_args.extend(map(six.text_type, args))
- return command_args
-
- def _get_version(self):
- p = subprocess.Popen(
- ['task', '--version'],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
- return stdout.strip('\n')
-
- def get_config(self):
- raw_output = self.execute_command(
- ['show'],
- config_override={'verbose': 'nothing'}
- )
-
- config = dict()
- config_regex = re.compile(r'^(?P<key>[^\s]+)\s+(?P<value>[^\s].+$)')
-
- for line in raw_output:
- match = config_regex.match(line)
- if match:
- config[match.group('key')] = match.group('value').strip()
-
- return config
-
- def execute_command(self, args, config_override={}, allow_failure=True,
- return_all=False):
- command_args = self._get_command_args(
- args, config_override=config_override)
- logger.debug(' '.join(command_args))
- p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
- if p.returncode and allow_failure:
- if stderr.strip():
- error_msg = stderr.strip()
- else:
- error_msg = stdout.strip()
- raise TaskWarriorException(error_msg)
-
- # Return all whole triplet only if explicitly asked for
- if not return_all:
- return stdout.rstrip().split('\n')
- else:
- return (stdout.rstrip().split('\n'),
- stderr.rstrip().split('\n'),
- p.returncode)
-
- def enforce_recurrence(self):
- # Run arbitrary report command which will trigger generation
- # of recurrent tasks.
-
- # Only necessary for TW up to 2.4.1, fixed in 2.4.2.
- if self.version < VERSION_2_4_2:
- self.execute_command(['next'], allow_failure=False)
-
- def filter_tasks(self, filter_obj):
- self.enforce_recurrence()
- args = ['export', '--'] + filter_obj.get_filter_params()
- tasks = []
- for line in self.execute_command(args):
- if line:
- data = line.strip(',')
- try:
- filtered_task = Task(self)
- filtered_task._load_data(json.loads(data))
- tasks.append(filtered_task)
- except ValueError:
- raise TaskWarriorException('Invalid JSON: %s' % data)
- return tasks
-
- def merge_with(self, path, push=False):
- path = path.rstrip('/') + '/'
- self.execute_command(['merge', path], config_override={
- 'merge.autopush': 'yes' if push else 'no',
- })
-
- def undo(self):
- self.execute_command(['undo'])
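
A minimal usage sketch of the refactored layout, assuming the backends module exposes the TaskWarrior class with the same constructor that is removed from this file above (the data location and the 'next' report are illustrative):

    from backends import TaskWarrior, TaskWarriorException

    # The keyword arguments mirror the __init__ removed from this module;
    # that the moved class keeps this exact signature is an assumption.
    tw = TaskWarrior(data_location='~/.task', create=True)

    try:
        # Reports run through the taskrc and config overrides managed
        # by the backend.
        tw.execute_command(['next'])
    except TaskWarriorException as error:
        print(error)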