]> git.madduck.net Git - etc/taskwarrior.git/blob - tasklib/backends.py

madduck's git repository

Every one of the projects in this repository is available at the canonical URL git://git.madduck.net/madduck/pub/<projectpath> — see each project's metadata for the exact URL.

All patches and comments are welcome. Please squash your changes to logical commits before using git-format-patch and git-send-email to patches@git.madduck.net. I would be especially grateful if you read over the Git project's submission guidelines and adhere to them.

SSH access, as well as push access, can be individually arranged.

If you use my repositories frequently, consider adding the following snippet to ~/.gitconfig and using the third clone URL listed for each project:

[url "git://git.madduck.net/madduck/"]
  insteadOf = madduck:

Task: Conversion of modified fields to TW Cli args is TW-specific, move to backend
[etc/taskwarrior.git] / tasklib / backends.py
import abc
import copy
import json
import logging
import os
import re
import subprocess

import six

from tasklib.task import Task, TaskFilter, TaskQuerySet
8
# Feature-gate version constants: TaskWarrior behaviour differs across these
# releases, and the code below compares self.version against them.
# Plain u'' literals are used instead of six.u() — they are equivalent on
# Python 2 and Python 3.3+, and avoid depending on six at import time.
VERSION_2_1_0 = u'2.1.0'
VERSION_2_2_0 = u'2.2.0'
VERSION_2_3_0 = u'2.3.0'
VERSION_2_4_0 = u'2.4.0'
VERSION_2_4_1 = u'2.4.1'
VERSION_2_4_2 = u'2.4.2'
VERSION_2_4_3 = u'2.4.3'
VERSION_2_4_4 = u'2.4.4'
VERSION_2_4_5 = u'2.4.5'
18
19
class Backend(object):
    """
    Abstract interface that every task backend is expected to implement.

    NOTE(review): the @abc.abstractmethod decorators are not enforced here,
    because the class does not use abc.ABCMeta as its metaclass — subclasses
    are not actually forced to override these methods.
    """

    # Filter implementation used to build query parameters for this backend.
    filter_class = TaskFilter

    @abc.abstractmethod
    def filter_tasks(self, filter_obj):
        """Returns a list of Task objects matching the given filter"""

    @abc.abstractmethod
    def save_task(self, task):
        """Stores the given task in the backend database."""

    @abc.abstractmethod
    def delete_task(self, task):
        """Removes the given task from the backend database."""

    @abc.abstractmethod
    def start_task(self, task):
        """Marks the given task as started."""

    @abc.abstractmethod
    def stop_task(self, task):
        """Marks the given task as stopped."""

    @abc.abstractmethod
    def complete_task(self, task):
        """Marks the given task as completed."""

    @abc.abstractmethod
    def refresh_task(self, task, after_save=False):
        """
        Refreshes the given task. Returns new data dict with serialized
        attributes.
        """

    @abc.abstractmethod
    def annotate_task(self, task, annotation):
        """Adds the given annotation to the task."""

    @abc.abstractmethod
    def denotate_task(self, task, annotation):
        """Removes the given annotation from the task."""

    @abc.abstractmethod
    def sync(self):
        """Syncs the backend database with the taskd server"""
69
70
class TaskWarriorException(Exception):
    """Raised when the 'task' command line tool fails or returns an error."""
73
74
class TaskWarrior(Backend):
    """
    Backend implementation driving the TaskWarrior command line tool.

    Every operation shells out to the ``task`` binary, overriding the
    relevant configuration values (rc.*) on the command line. Inherits
    ``filter_class`` from Backend, which refresh_task relies on.
    """

    def __init__(self, data_location=None, create=True,
                 taskrc_location='~/.taskrc'):
        """
        :param data_location: Optional override for data.location. '~' is
            expanded; the directory is created if missing and create is True.
        :param create: Whether to create a missing data_location directory.
        :param taskrc_location: Path of the taskrc file to use; '~' expanded.
        """
        self.taskrc_location = os.path.expanduser(taskrc_location)

        # If taskrc does not exist, pass / to use defaults and avoid creating
        # dummy .taskrc file by TaskWarrior
        if not os.path.exists(self.taskrc_location):
            self.taskrc_location = '/'

        self.version = self._get_version()
        self.config = {
            'confirmation': 'no',
            'dependency.confirmation': 'no',  # See TW-1483 or taskrc man page
            'recurrence.confirmation': 'no',  # Necessary for modifying R tasks

            # Defaults to on since 2.4.5, we expect off during parsing
            'json.array': 'off',

            # 2.4.3 onwards supports 0 as infinite bulk, otherwise set just
            # arbitrary big number which is likely to be large enough
            # NOTE(review): versions are compared as plain strings, which
            # misorders e.g. '2.10.0' vs '2.4.3' — confirm before TW 2.10.
            'bulk': 0 if self.version >= VERSION_2_4_3 else 100000,
        }

        # Set data.location override if passed via kwarg
        if data_location is not None:
            data_location = os.path.expanduser(data_location)
            if create and not os.path.exists(data_location):
                os.makedirs(data_location)
            self.config['data.location'] = data_location

        self.tasks = TaskQuerySet(self)

    def _get_command_args(self, args, config_override=None):
        """Build the full 'task' argv: binary, rc file, rc.* overrides, args."""
        command_args = ['task', 'rc:{0}'.format(self.taskrc_location)]
        config = self.config.copy()
        config.update(config_override or dict())
        for item in config.items():
            command_args.append('rc.{0}={1}'.format(*item))
        command_args.extend(map(six.text_type, args))
        return command_args

    def _get_version(self):
        """Return the version string reported by 'task --version'."""
        p = subprocess.Popen(
                ['task', '--version'],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
        return stdout.strip('\n')

    def _get_modified_task_fields_as_args(self, task):
        """Serialize the task's modified fields into 'field:value' CLI args."""
        args = []

        def add_field(field):
            # Add the output of format_field method to args list (defaults to
            # field:value)
            serialized_value = task._serialize(field, task._data[field])

            # Empty values should not be enclosed in quotation marks, see
            # TW-1510
            # Fixed: compare with '==' rather than 'is' — identity comparison
            # against a string literal is incorrect (and a SyntaxWarning on
            # modern Python).
            if serialized_value == '':
                escaped_serialized_value = ''
            else:
                escaped_serialized_value = u"'{0}'".format(serialized_value)

            def format_default():
                return u"{0}:{1}".format(field, escaped_serialized_value)

            # Tasks may provide a custom formatter per field
            # (format_<fieldname>); fall back to field:value otherwise.
            format_func = getattr(task, 'format_{0}'.format(field),
                                  format_default)

            args.append(format_func())

        # If we're modifying saved task, simply pass on all modified fields
        if task.saved:
            for field in task._modified_fields:
                add_field(field)
        # For new tasks, pass all fields that make sense
        else:
            for field in task._data.keys():
                if field in task.read_only_fields:
                    continue
                add_field(field)

        return args

    def get_config(self):
        """Return the current TaskWarrior configuration as a dict."""
        raw_output = self.execute_command(
                ['show'],
                config_override={'verbose': 'nothing'}
            )

        config = dict()
        config_regex = re.compile(r'^(?P<key>[^\s]+)\s+(?P<value>[^\s].+$)')

        for line in raw_output:
            match = config_regex.match(line)
            if match:
                config[match.group('key')] = match.group('value').strip()

        return config

    def execute_command(self, args, config_override=None, allow_failure=True,
                        return_all=False):
        """
        Run the 'task' binary with the given arguments.

        :param args: Arguments appended after the rc.* overrides.
        :param config_override: Dict of rc settings overriding self.config.
        :param allow_failure: When True (default) a non-zero exit status
            raises TaskWarriorException; pass False to ignore failures
            (as enforce_recurrence does).
        :param return_all: When True, return the (stdout lines, stderr
            lines, returncode) triplet instead of just stdout lines.
        """
        command_args = self._get_command_args(
            args, config_override=config_override)
        # Module-level 'logger' is not defined in this module; obtain one
        # explicitly so debug logging does not raise NameError.
        logging.getLogger(__name__).debug(' '.join(command_args))
        p = subprocess.Popen(command_args, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        stdout, stderr = [x.decode('utf-8') for x in p.communicate()]
        if p.returncode and allow_failure:
            if stderr.strip():
                error_msg = stderr.strip()
            else:
                error_msg = stdout.strip()
            raise TaskWarriorException(error_msg)

        # Return all whole triplet only if explicitly asked for
        if not return_all:
            return stdout.rstrip().split('\n')
        else:
            return (stdout.rstrip().split('\n'),
                    stderr.rstrip().split('\n'),
                    p.returncode)

    def enforce_recurrence(self):
        """Force generation of recurrent task instances on old TW versions."""
        # Run arbitrary report command which will trigger generation
        # of recurrent tasks.

        # Only necessary for TW up to 2.4.1, fixed in 2.4.2.
        if self.version < VERSION_2_4_2:
            self.execute_command(['next'], allow_failure=False)

    def merge_with(self, path, push=False):
        """Merge the database at the given path, optionally auto-pushing."""
        path = path.rstrip('/') + '/'
        self.execute_command(['merge', path], config_override={
            'merge.autopush': 'yes' if push else 'no',
        })

    def undo(self):
        """Revert the most recent change recorded by TaskWarrior."""
        self.execute_command(['undo'])

    # Backend interface implementation

    def filter_tasks(self, filter_obj):
        """Return a list of Task objects matching the given filter."""
        self.enforce_recurrence()
        args = ['export', '--'] + filter_obj.get_filter_params()
        tasks = []
        for line in self.execute_command(args):
            if line:
                # json.array is off, so each line is one JSON object with a
                # trailing comma to strip.
                data = line.strip(',')
                try:
                    filtered_task = Task(self)
                    filtered_task._load_data(json.loads(data))
                    tasks.append(filtered_task)
                except ValueError:
                    raise TaskWarriorException('Invalid JSON: %s' % data)
        return tasks

    def save_task(self, task):
        """Save a task into TaskWarrior database using add/modify call"""

        args = [task['uuid'], 'modify'] if task.saved else ['add']
        args.extend(self._get_modified_task_fields_as_args(task))
        output = self.execute_command(args)

        # Parse out the new ID, if the task is being added for the first time
        if not task.saved:
            id_lines = [line for line in output
                        if line.startswith('Created task ')]

            # Complain loudly if it seems that more tasks were created
            # Should not happen
            if len(id_lines) != 1 or len(id_lines[0].split(' ')) != 3:
                raise TaskWarriorException("Unexpected output when creating "
                                           "task: %s" % '\n'.join(id_lines))

            # Circumvent the ID storage, since ID is considered read-only
            identifier = id_lines[0].split(' ')[2].rstrip('.')

            # Identifier can be either ID or UUID for completed tasks
            try:
                task._data['id'] = int(identifier)
            except ValueError:
                task._data['uuid'] = identifier

        # Refreshing is very important here, as not only modification time
        # is updated, but arbitrary attribute may have changed due hooks
        # altering the data before saving
        task.refresh(after_save=True)

    def delete_task(self, task):
        """Delete the given task from the TaskWarrior database."""
        self.execute_command([task['uuid'], 'delete'])

    def start_task(self, task):
        """Mark the given task as started."""
        self.execute_command([task['uuid'], 'start'])

    def stop_task(self, task):
        """Mark the given task as stopped."""
        self.execute_command([task['uuid'], 'stop'])

    def complete_task(self, task):
        """Mark the given task as completed."""
        # Older versions of TW do not stop active task at completion
        if self.version < VERSION_2_4_0 and task.active:
            task.stop()

        self.execute_command([task['uuid'], 'done'])

    def annotate_task(self, task, annotation):
        """Add the given annotation to the task."""
        args = [task['uuid'], 'annotate', annotation]
        self.execute_command(args)

    def denotate_task(self, task, annotation):
        """Remove the given annotation from the task."""
        args = [task['uuid'], 'denotate', annotation]
        self.execute_command(args)

    def refresh_task(self, task, after_save=False):
        """
        Re-export the task from TaskWarrior and return its deserialized
        JSON data dict.
        """
        # We need to use ID as backup for uuid here for the refreshes
        # of newly saved tasks. Any other place in the code is fine
        # with using UUID only.
        args = [task['uuid'] or task['id'], 'export']
        output = self.execute_command(args)

        def valid(output):
            # Exactly one exported line that looks like a JSON object
            return len(output) == 1 and output[0].startswith('{')

        # For older TW versions attempt to uniquely locate the task
        # using the data we have if it has been just saved.
        # This can happen when adding a completed task on older TW versions.
        if (not valid(output) and self.version < VERSION_2_4_5
                and after_save):

            # Make a copy, removing ID and UUID. It's most likely invalid
            # (ID 0) if it failed to match a unique task.
            data = copy.deepcopy(task._data)
            data.pop('id', None)
            data.pop('uuid', None)

            # filter_class is defined on Backend; this is why TaskWarrior
            # must inherit from it.
            taskfilter = self.filter_class(self)
            for key, value in data.items():
                taskfilter.add_filter_param(key, value)

            output = self.execute_command(['export', '--'] +
                taskfilter.get_filter_params())

        # If more than 1 task has been matched still, raise an exception
        if not valid(output):
            raise TaskWarriorException(
                "Unique identifiers {0} with description: {1} matches "
                "multiple tasks: {2}".format(
                task['uuid'] or task['id'], task['description'], output)
            )

        return json.loads(output[0])

    def sync(self):
        """Syncs the backend database with the taskd server"""
        self.execute_command(['sync'])