Left: | ||
Right: |
LEFT | RIGHT |
---|---|
1 # -*- coding: utf-8 -*- | 1 # -*- coding: utf-8 -*- |
2 """Implementation of a SQLite storage file.""" | 2 """SQLite-based storage.""" |
3 | 3 |
4 from __future__ import unicode_literals | 4 from __future__ import unicode_literals |
5 | 5 |
6 import logging | 6 import logging |
7 import os | 7 import os |
8 import sqlite3 | 8 import sqlite3 |
9 import zlib | 9 import zlib |
10 | 10 |
11 from plaso.containers import artifacts | |
12 from plaso.containers import errors | |
13 from plaso.containers import event_sources | |
14 from plaso.containers import events | |
15 from plaso.containers import reports | |
11 from plaso.containers import sessions | 16 from plaso.containers import sessions |
17 from plaso.containers import tasks | |
12 from plaso.lib import definitions | 18 from plaso.lib import definitions |
13 from plaso.storage import interface | |
Joachim Metz
2018/01/02 18:11:27
alphabetical order
onager
2018/01/03 19:39:10
Done.
| |
14 from plaso.storage import event_heaps | 19 from plaso.storage import event_heaps |
15 from plaso.storage import identifiers | 20 from plaso.storage import identifiers |
21 from plaso.storage import interface | |
16 | 22 |
17 | 23 |
18 class SQLiteStorageFile(interface.BaseStorageFile): | 24 class SQLiteStorageFile(interface.BaseStorageFile): |
19 """SQLite-based storage file. | 25 """SQLite-based storage file. |
20 | 26 |
21 Attributes: | 27 Attributes: |
22 format_version (int): storage format version. | 28 format_version (int): storage format version. |
23 serialization_format (str): serialization format. | 29 serialization_format (str): serialization format. |
24 storage_type (str): storage type. | 30 storage_type (str): storage type. |
25 """ | 31 """ |
26 | 32 |
27 _FORMAT_VERSION = 20170707 | 33 _FORMAT_VERSION = 20180101 |
28 | 34 |
29 # The earliest format version, stored in-file, that this class | 35 # The earliest format version, stored in-file, that this class |
30 # is able to read. | 36 # is able to read. |
31 _COMPATIBLE_FORMAT_VERSION = 20170707 | 37 _COMPATIBLE_FORMAT_VERSION = 20170707 |
32 | 38 |
39 _CONTAINER_TYPE_ANALYSIS_REPORT = reports.AnalysisReport.CONTAINER_TYPE | |
40 _CONTAINER_TYPE_EVENT = events.EventObject.CONTAINER_TYPE | |
41 _CONTAINER_TYPE_EVENT_DATA = events.EventData.CONTAINER_TYPE | |
42 _CONTAINER_TYPE_EVENT_SOURCE = event_sources.EventSource.CONTAINER_TYPE | |
43 _CONTAINER_TYPE_EVENT_TAG = events.EventTag.CONTAINER_TYPE | |
44 _CONTAINER_TYPE_EXTRACTION_ERROR = errors.ExtractionError.CONTAINER_TYPE | |
45 _CONTAINER_TYPE_SESSION_COMPLETION = sessions.SessionCompletion.CONTAINER_TYPE | |
46 _CONTAINER_TYPE_SESSION_START = sessions.SessionStart.CONTAINER_TYPE | |
47 _CONTAINER_TYPE_SYSTEM_CONFIGURATION = ( | |
48 artifacts.SystemConfigurationArtifact.CONTAINER_TYPE) | |
49 _CONTAINER_TYPE_TASK_COMPLETION = tasks.TaskCompletion.CONTAINER_TYPE | |
50 _CONTAINER_TYPE_TASK_START = tasks.TaskStart.CONTAINER_TYPE | |
51 | |
33 _CONTAINER_TYPES = ( | 52 _CONTAINER_TYPES = ( |
34 'analysis_report', 'extraction_error', 'event', 'event_data', | 53 _CONTAINER_TYPE_ANALYSIS_REPORT, |
35 'event_source', 'event_tag', 'session_completion', 'session_start', | 54 _CONTAINER_TYPE_EXTRACTION_ERROR, |
36 'system_configuration', 'task_completion', 'task_start') | 55 _CONTAINER_TYPE_EVENT, |
56 _CONTAINER_TYPE_EVENT_DATA, | |
57 _CONTAINER_TYPE_EVENT_SOURCE, | |
58 _CONTAINER_TYPE_EVENT_TAG, | |
59 _CONTAINER_TYPE_SESSION_COMPLETION, | |
60 _CONTAINER_TYPE_SESSION_START, | |
61 _CONTAINER_TYPE_SYSTEM_CONFIGURATION, | |
62 _CONTAINER_TYPE_TASK_COMPLETION, | |
63 _CONTAINER_TYPE_TASK_START) | |
37 | 64 |
38 _CREATE_METADATA_TABLE_QUERY = ( | 65 _CREATE_METADATA_TABLE_QUERY = ( |
39 'CREATE TABLE metadata (key TEXT, value TEXT);') | 66 'CREATE TABLE metadata (key TEXT, value TEXT);') |
40 | 67 |
41 _CREATE_TABLE_QUERY = ( | 68 _CREATE_TABLE_QUERY = ( |
42 'CREATE TABLE {0:s} (' | 69 'CREATE TABLE {0:s} (' |
43 '_identifier INTEGER PRIMARY KEY AUTOINCREMENT,' | 70 '_identifier INTEGER PRIMARY KEY AUTOINCREMENT,' |
44 '_data {1:s});') | 71 '_data {1:s});') |
45 | 72 |
46 _CREATE_EVENT_TABLE_QUERY = ( | 73 _CREATE_EVENT_TABLE_QUERY = ( |
(...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
123 def _AddSerializedEvent(self, event): | 150 def _AddSerializedEvent(self, event): |
124 """Adds a serialized event. | 151 """Adds a serialized event. |
125 | 152 |
126 Args: | 153 Args: |
127 event (EventObject): event. | 154 event (EventObject): event. |
128 | 155 |
129 Raises: | 156 Raises: |
130 IOError: if the event cannot be serialized. | 157 IOError: if the event cannot be serialized. |
131 """ | 158 """ |
132 identifier = identifiers.SQLTableIdentifier( | 159 identifier = identifiers.SQLTableIdentifier( |
133 'event', self._serialized_event_heap.number_of_events + 1) | 160 self._CONTAINER_TYPE_EVENT, |
161 self._serialized_event_heap.number_of_events + 1) | |
134 event.SetIdentifier(identifier) | 162 event.SetIdentifier(identifier) |
135 | 163 |
136 serialized_data = self._SerializeAttributeContainer(event) | 164 serialized_data = self._SerializeAttributeContainer(event) |
137 | 165 |
138 self._serialized_event_heap.PushEvent(event.timestamp, serialized_data) | 166 self._serialized_event_heap.PushEvent(event.timestamp, serialized_data) |
139 | 167 |
140 if self._serialized_event_heap.data_size > self._maximum_buffer_size: | 168 if self._serialized_event_heap.data_size > self._maximum_buffer_size: |
141 self._WriteSerializedAttributeContainerList('event') | 169 self._WriteSerializedAttributeContainerList(self._CONTAINER_TYPE_EVENT) |
142 | 170 |
143 @classmethod | 171 @classmethod |
144 def _CheckStorageMetadata(cls, metadata_values): | 172 def _CheckStorageMetadata(cls, metadata_values): |
145 """Checks the storage metadata. | 173 """Checks the storage metadata. |
146 | 174 |
147 Args: | 175 Args: |
148 metadata_values (dict[str, str]): metadata values per key. | 176 metadata_values (dict[str, str]): metadata values per key. |
149 | 177 |
150 Raises: | 178 Raises: |
151 IOError: if the format version or the serializer format is not supported. | 179 IOError: if the format version or the serializer format is not supported. |
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
187 raise IOError('Unsupported storage type: {0:s}'.format( | 215 raise IOError('Unsupported storage type: {0:s}'.format( |
188 storage_type)) | 216 storage_type)) |
189 | 217 |
190 def _GetAttributeContainerByIndex(self, container_type, index): | 218 def _GetAttributeContainerByIndex(self, container_type, index): |
191 """Retrieves a specific attribute container. | 219 """Retrieves a specific attribute container. |
192 | 220 |
193 Args: | 221 Args: |
194 container_type (str): attribute container type. | 222 container_type (str): attribute container type. |
195 index (int): attribute container index. | 223 index (int): attribute container index. |
196 | 224 |
197 Returns: | 225 Returns: |
198 AttributeContainer: attribute container or None if not available. | 226 AttributeContainer: attribute container or None if not available. |
199 """ | 227 """ |
200 sequence_number = index + 1 | 228 sequence_number = index + 1 |
201 query = 'SELECT _data FROM {0:s} WHERE rowid = {1:d}'.format( | 229 query = 'SELECT _data FROM {0:s} WHERE rowid = {1:d}'.format( |
202 container_type, sequence_number) | 230 container_type, sequence_number) |
203 self._cursor.execute(query) | 231 self._cursor.execute(query) |
204 | 232 |
205 row = self._cursor.fetchone() | 233 row = self._cursor.fetchone() |
206 if row: | 234 if row: |
207 identifier = identifiers.SQLTableIdentifier( | 235 identifier = identifiers.SQLTableIdentifier( |
(...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
266 else: | 294 else: |
267 serialized_data = row[1] | 295 serialized_data = row[1] |
268 | 296 |
269 attribute_container = self._DeserializeAttributeContainer( | 297 attribute_container = self._DeserializeAttributeContainer( |
270 container_type, serialized_data) | 298 container_type, serialized_data) |
271 attribute_container.SetIdentifier(identifier) | 299 attribute_container.SetIdentifier(identifier) |
272 yield attribute_container | 300 yield attribute_container |
273 | 301 |
274 row = cursor.fetchone() | 302 row = cursor.fetchone() |
275 | 303 |
304 def _HasAttributeContainers(self, container_type): | |
305 """Determines if a store contains a specific type of attribute containers. | |
306 | |
307 Args: | |
308 container_type (str): attribute container type. | |
309 | |
310 Returns: | |
311 bool: True if the store contains the specified type of attribute | |
312 containers. | |
313 """ | |
314 query = 'SELECT COUNT(*) FROM {0:s}'.format(container_type) | |
315 self._cursor.execute(query) | |
316 | |
317 row = self._cursor.fetchone() | |
318 return row and row[0] != 0 | |
319 | |
276 def _HasTable(self, table_name): | 320 def _HasTable(self, table_name): |
277 """Determines if a specific table exists. | 321 """Determines if a specific table exists. |
278 | 322 |
279 Args: | 323 Args: |
280 table_name (str): name of the table. | 324 table_name (str): name of the table. |
281 | 325 |
282 Returns: | 326 Returns: |
283 True if the table exists, false otherwise. | 327 True if the table exists, false otherwise. |
284 """ | 328 """ |
285 query = self._HAS_TABLE_QUERY.format(table_name) | 329 query = self._HAS_TABLE_QUERY.format(table_name) |
286 | 330 |
287 self._cursor.execute(query) | 331 self._cursor.execute(query) |
288 return bool(self._cursor.fetchone()) | 332 return bool(self._cursor.fetchone()) |
333 | |
334 def _ReadEventDataIntoEvent(self, event): | |
335 """Reads event data into the event. | |
336 | |
337 This function is intended to offer backwards compatible event behavior. | |
338 | |
339 Args: | |
340 event (EventObject): event. | |
341 """ | |
342 if self.storage_type != definitions.STORAGE_TYPE_SESSION: | |
343 return | |
344 | |
345 if not hasattr(event, 'event_data_row_identifier'): | |
346 return | |
347 | |
348 event_data_identifier = identifiers.SQLTableIdentifier( | |
349 self._CONTAINER_TYPE_EVENT_DATA, event.event_data_row_identifier) | |
350 event.SetEventDataIdentifier(event_data_identifier) | |
351 | |
352 event_data = self._GetAttributeContainerByIndex( | |
353 self._CONTAINER_TYPE_EVENT_DATA, event.event_data_row_identifier - 1) | |
354 if not event_data: | |
355 return | |
356 | |
357 for attribute_name, attribute_value in event_data.GetAttributes(): | |
358 setattr(event, attribute_name, attribute_value) | |
359 | |
360 del event.event_data_row_identifier | |
289 | 361 |
290 def _ReadStorageMetadata(self): | 362 def _ReadStorageMetadata(self): |
291 """Reads the storage metadata. | 363 """Reads the storage metadata. |
292 | 364 |
293 Returns: | 365 Returns: |
294 bool: True if the storage metadata was read. | 366 bool: True if the storage metadata was read. |
295 """ | 367 """ |
296 query = 'SELECT key, value FROM metadata' | 368 query = 'SELECT key, value FROM metadata' |
297 self._cursor.execute(query) | 369 self._cursor.execute(query) |
298 | 370 |
299 metadata_values = {row[0]: row[1] for row in self._cursor.fetchall()} | 371 metadata_values = {row[0]: row[1] for row in self._cursor.fetchall()} |
300 | 372 |
301 SQLiteStorageFile._CheckStorageMetadata(metadata_values) | 373 SQLiteStorageFile._CheckStorageMetadata(metadata_values) |
302 | 374 |
303 self.format_version = metadata_values['format_version'] | 375 self.format_version = metadata_values['format_version'] |
304 self.compression_format = metadata_values['compression_format'] | 376 self.compression_format = metadata_values['compression_format'] |
305 self.serialization_format = metadata_values['serialization_format'] | 377 self.serialization_format = metadata_values['serialization_format'] |
306 self.storage_type = metadata_values['storage_type'] | 378 self.storage_type = metadata_values['storage_type'] |
307 | 379 |
308 def _WriteAttributeContainer(self, attribute_container): | 380 def _WriteAttributeContainer(self, attribute_container): |
309 """Writes an attribute container. | 381 """Writes an attribute container. |
310 | 382 |
311 The table for the container type must exist. | 383 The table for the container type must exist. |
312 | 384 |
313 Args: | 385 Args: |
314 attribute_container (AttributeContainer): attribute container. | 386 attribute_container (AttributeContainer): attribute container. |
315 """ | 387 """ |
316 if attribute_container.CONTAINER_TYPE == 'event': | 388 if attribute_container.CONTAINER_TYPE == self._CONTAINER_TYPE_EVENT: |
317 timestamp, serialized_data = self._serialized_event_heap.PopEvent() | 389 timestamp, serialized_data = self._serialized_event_heap.PopEvent() |
318 else: | 390 else: |
319 serialized_data = self._SerializeAttributeContainer(attribute_container) | 391 serialized_data = self._SerializeAttributeContainer(attribute_container) |
320 | 392 |
321 if self.compression_format == definitions.COMPRESSION_FORMAT_ZLIB: | 393 if self.compression_format == definitions.COMPRESSION_FORMAT_ZLIB: |
322 serialized_data = zlib.compress(serialized_data) | 394 serialized_data = zlib.compress(serialized_data) |
323 serialized_data = sqlite3.Binary(serialized_data) | 395 serialized_data = sqlite3.Binary(serialized_data) |
324 | 396 |
325 if attribute_container.CONTAINER_TYPE == 'event': | 397 if attribute_container.CONTAINER_TYPE == self._CONTAINER_TYPE_EVENT: |
326 query = 'INSERT INTO event (_timestamp, _data) VALUES (?, ?)' | 398 query = 'INSERT INTO event (_timestamp, _data) VALUES (?, ?)' |
327 self._cursor.execute(query, (timestamp, serialized_data)) | 399 self._cursor.execute(query, (timestamp, serialized_data)) |
328 else: | 400 else: |
329 query = 'INSERT INTO {0:s} (_data) VALUES (?)'.format( | 401 query = 'INSERT INTO {0:s} (_data) VALUES (?)'.format( |
330 attribute_container.CONTAINER_TYPE) | 402 attribute_container.CONTAINER_TYPE) |
331 self._cursor.execute(query, (serialized_data, )) | 403 self._cursor.execute(query, (serialized_data, )) |
332 | 404 |
333 identifier = identifiers.SQLTableIdentifier( | 405 identifier = identifiers.SQLTableIdentifier( |
334 attribute_container.CONTAINER_TYPE, self._cursor.lastrowid) | 406 attribute_container.CONTAINER_TYPE, self._cursor.lastrowid) |
335 attribute_container.SetIdentifier(identifier) | 407 attribute_container.SetIdentifier(identifier) |
336 | 408 |
337 def _WriteSerializedAttributeContainerList(self, container_type): | 409 def _WriteSerializedAttributeContainerList(self, container_type): |
338 """Writes a serialized attribute container list. | 410 """Writes a serialized attribute container list. |
339 | 411 |
340 Args: | 412 Args: |
341 container_type (str): attribute container type. | 413 container_type (str): attribute container type. |
342 """ | 414 """ |
343 if container_type == 'event': | 415 if container_type == self._CONTAINER_TYPE_EVENT: |
344 if not self._serialized_event_heap.data_size: | 416 if not self._serialized_event_heap.data_size: |
345 return | 417 return |
346 number_of_attribute_containers = ( | 418 number_of_attribute_containers = ( |
347 self._serialized_event_heap.number_of_events) | 419 self._serialized_event_heap.number_of_events) |
348 | 420 |
349 else: | 421 else: |
350 container_list = self._GetSerializedAttributeContainerList(container_type) | 422 container_list = self._GetSerializedAttributeContainerList(container_type) |
351 if not container_list.data_size: | 423 if not container_list.data_size: |
352 return | 424 return |
353 | 425 |
354 number_of_attribute_containers = ( | 426 number_of_attribute_containers = ( |
355 container_list.number_of_attribute_containers) | 427 container_list.number_of_attribute_containers) |
356 | 428 |
357 if self._serializers_profiler: | 429 if self._serializers_profiler: |
358 self._serializers_profiler.StartTiming('write') | 430 self._serializers_profiler.StartTiming('write') |
359 | 431 |
360 if container_type == 'event': | 432 if container_type == self._CONTAINER_TYPE_EVENT: |
361 query = 'INSERT INTO event (_timestamp, _data) VALUES (?, ?)' | 433 query = 'INSERT INTO event (_timestamp, _data) VALUES (?, ?)' |
362 else: | 434 else: |
363 query = 'INSERT INTO {0:s} (_data) VALUES (?)'.format(container_type) | 435 query = 'INSERT INTO {0:s} (_data) VALUES (?)'.format(container_type) |
364 | 436 |
365 # TODO: directly use container_list instead of values_tuple_list. | 437 # TODO: directly use container_list instead of values_tuple_list. |
366 values_tuple_list = [] | 438 values_tuple_list = [] |
367 for _ in range(number_of_attribute_containers): | 439 for _ in range(number_of_attribute_containers): |
368 if container_type == 'event': | 440 if container_type == self._CONTAINER_TYPE_EVENT: |
369 timestamp, serialized_data = self._serialized_event_heap.PopEvent() | 441 timestamp, serialized_data = self._serialized_event_heap.PopEvent() |
370 else: | 442 else: |
371 serialized_data = container_list.PopAttributeContainer() | 443 serialized_data = container_list.PopAttributeContainer() |
372 | 444 |
373 if self.compression_format == definitions.COMPRESSION_FORMAT_ZLIB: | 445 if self.compression_format == definitions.COMPRESSION_FORMAT_ZLIB: |
374 serialized_data = zlib.compress(serialized_data) | 446 serialized_data = zlib.compress(serialized_data) |
375 serialized_data = sqlite3.Binary(serialized_data) | 447 serialized_data = sqlite3.Binary(serialized_data) |
376 | 448 |
377 if container_type == 'event': | 449 if container_type == self._CONTAINER_TYPE_EVENT: |
378 values_tuple_list.append((timestamp, serialized_data)) | 450 values_tuple_list.append((timestamp, serialized_data)) |
379 else: | 451 else: |
380 values_tuple_list.append((serialized_data, )) | 452 values_tuple_list.append((serialized_data, )) |
381 | 453 |
382 self._cursor.executemany(query, values_tuple_list) | 454 self._cursor.executemany(query, values_tuple_list) |
383 | 455 |
384 if self._serializers_profiler: | 456 if self._serializers_profiler: |
385 self._serializers_profiler.StopTiming('write') | 457 self._serializers_profiler.StopTiming('write') |
386 | 458 |
387 if container_type == 'event': | 459 if container_type == self._CONTAINER_TYPE_EVENT: |
388 self._serialized_event_heap.Empty() | 460 self._serialized_event_heap.Empty() |
389 else: | 461 else: |
390 container_list.Empty() | 462 container_list.Empty() |
391 | 463 |
392 def _WriteStorageMetadata(self): | 464 def _WriteStorageMetadata(self): |
393 """Writes the storage metadata.""" | 465 """Writes the storage metadata.""" |
394 self._cursor.execute(self._CREATE_METADATA_TABLE_QUERY) | 466 self._cursor.execute(self._CREATE_METADATA_TABLE_QUERY) |
395 | 467 |
396 query = 'INSERT INTO metadata (key, value) VALUES (?, ?)' | 468 query = 'INSERT INTO metadata (key, value) VALUES (?, ?)' |
397 | 469 |
(...skipping 30 matching lines...) Expand all Loading... | |
428 """Adds an error. | 500 """Adds an error. |
429 | 501 |
430 Args: | 502 Args: |
431 error (ExtractionError): error. | 503 error (ExtractionError): error. |
432 | 504 |
433 Raises: | 505 Raises: |
434 IOError: when the storage file is closed or read-only. | 506 IOError: when the storage file is closed or read-only. |
435 """ | 507 """ |
436 self._RaiseIfNotWritable() | 508 self._RaiseIfNotWritable() |
437 | 509 |
438 self._AddAttributeContainer('extraction_error', error) | 510 self._AddAttributeContainer(self._CONTAINER_TYPE_EXTRACTION_ERROR, error) |
439 | 511 |
440 def AddEvent(self, event): | 512 def AddEvent(self, event): |
441 """Adds an event. | 513 """Adds an event. |
442 | 514 |
443 Args: | 515 Args: |
444 event (EventObject): event. | 516 event (EventObject): event. |
445 | 517 |
446 Raises: | 518 Raises: |
447 IOError: when the storage file is closed or read-only or | 519 IOError: when the storage file is closed or read-only or |
448 if the event data identifier type is not supported. | 520 if the event data identifier type is not supported. |
(...skipping 16 matching lines...) Expand all Loading... | |
465 """Adds event data. | 537 """Adds event data. |
466 | 538 |
467 Args: | 539 Args: |
468 event_data (EventData): event data. | 540 event_data (EventData): event data. |
469 | 541 |
470 Raises: | 542 Raises: |
471 IOError: when the storage file is closed or read-only. | 543 IOError: when the storage file is closed or read-only. |
472 """ | 544 """ |
473 self._RaiseIfNotWritable() | 545 self._RaiseIfNotWritable() |
474 | 546 |
475 self._AddAttributeContainer('event_data', event_data) | 547 self._AddAttributeContainer(self._CONTAINER_TYPE_EVENT_DATA, event_data) |
476 | 548 |
477 def AddEventSource(self, event_source): | 549 def AddEventSource(self, event_source): |
478 """Adds an event source. | 550 """Adds an event source. |
479 | 551 |
480 Args: | 552 Args: |
481 event_source (EventSource): event source. | 553 event_source (EventSource): event source. |
482 | 554 |
483 Raises: | 555 Raises: |
484 IOError: when the storage file is closed or read-only. | 556 IOError: when the storage file is closed or read-only. |
485 """ | 557 """ |
486 self._RaiseIfNotWritable() | 558 self._RaiseIfNotWritable() |
487 | 559 |
488 self._AddAttributeContainer('event_source', event_source) | 560 self._AddAttributeContainer( |
561 self._CONTAINER_TYPE_EVENT_SOURCE, event_source) | |
489 | 562 |
490 def AddEventTag(self, event_tag): | 563 def AddEventTag(self, event_tag): |
491 """Adds an event tag. | 564 """Adds an event tag. |
492 | 565 |
493 Args: | 566 Args: |
494 event_tag (EventTag): event tag. | 567 event_tag (EventTag): event tag. |
495 | 568 |
496 Raises: | 569 Raises: |
497 IOError: when the storage file is closed or read-only or | 570 IOError: when the storage file is closed or read-only or |
498 if the event identifier type is not supported. | 571 if the event identifier type is not supported. |
499 """ | 572 """ |
500 self._RaiseIfNotWritable() | 573 self._RaiseIfNotWritable() |
501 | 574 |
502 event_identifier = event_tag.GetEventIdentifier() | 575 event_identifier = event_tag.GetEventIdentifier() |
503 if not isinstance(event_identifier, identifiers.SQLTableIdentifier): | 576 if not isinstance(event_identifier, identifiers.SQLTableIdentifier): |
504 raise IOError('Unsupported event identifier type: {0:s}'.format( | 577 raise IOError('Unsupported event identifier type: {0:s}'.format( |
505 type(event_identifier))) | 578 type(event_identifier))) |
506 | 579 |
507 event_tag.event_row_identifier = event_identifier.row_identifier | 580 event_tag.event_row_identifier = event_identifier.row_identifier |
508 | 581 |
509 self._AddAttributeContainer('event_tag', event_tag) | 582 self._AddAttributeContainer(self._CONTAINER_TYPE_EVENT_TAG, event_tag) |
510 | 583 |
511 def AddEventTags(self, event_tags): | 584 def AddEventTags(self, event_tags): |
512 """Adds event tags. | 585 """Adds event tags. |
513 | 586 |
514 Args: | 587 Args: |
515 event_tags (list[EventTag]): event tags. | 588 event_tags (list[EventTag]): event tags. |
516 | 589 |
517 Raises: | 590 Raises: |
518 IOError: when the storage file is closed or read-only or | 591 IOError: when the storage file is closed or read-only or |
519 if the event tags cannot be serialized. | 592 if the event tags cannot be serialized. |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
557 def Close(self): | 630 def Close(self): |
558 """Closes the storage. | 631 """Closes the storage. |
559 | 632 |
560 Raises: | 633 Raises: |
561 IOError: if the storage file is already closed. | 634 IOError: if the storage file is already closed. |
562 """ | 635 """ |
563 if not self._is_open: | 636 if not self._is_open: |
564 raise IOError('Storage file already closed.') | 637 raise IOError('Storage file already closed.') |
565 | 638 |
566 if not self._read_only: | 639 if not self._read_only: |
567 self._WriteSerializedAttributeContainerList('event_source') | 640 self._WriteSerializedAttributeContainerList( |
568 self._WriteSerializedAttributeContainerList('event_data') | 641 self._CONTAINER_TYPE_EVENT_SOURCE) |
569 self._WriteSerializedAttributeContainerList('event') | 642 self._WriteSerializedAttributeContainerList( |
570 self._WriteSerializedAttributeContainerList('event_tag') | 643 self._CONTAINER_TYPE_EVENT_DATA) |
571 self._WriteSerializedAttributeContainerList('extraction_error') | 644 self._WriteSerializedAttributeContainerList(self._CONTAINER_TYPE_EVENT) |
645 self._WriteSerializedAttributeContainerList( | |
646 self._CONTAINER_TYPE_EVENT_TAG) | |
647 self._WriteSerializedAttributeContainerList( | |
648 self._CONTAINER_TYPE_EXTRACTION_ERROR) | |
572 | 649 |
573 if self._serializers_profiler: | 650 if self._serializers_profiler: |
574 self._serializers_profiler.Write() | 651 self._serializers_profiler.Write() |
575 | 652 |
576 if self._connection: | 653 if self._connection: |
577 # We need to run commit or not all data is stored in the database. | 654 # We need to run commit or not all data is stored in the database. |
578 self._connection.commit() | 655 self._connection.commit() |
579 self._connection.close() | 656 self._connection.close() |
580 | 657 |
581 self._connection = None | 658 self._connection = None |
582 self._cursor = None | 659 self._cursor = None |
583 | 660 |
584 self._is_open = False | 661 self._is_open = False |
585 | 662 |
586 def GetAnalysisReports(self): | 663 def GetAnalysisReports(self): |
587 """Retrieves the analysis reports. | 664 """Retrieves the analysis reports. |
588 | 665 |
589 Returns: | 666 Returns: |
590 generator(AnalysisReport): analysis report generator. | 667 generator(AnalysisReport): analysis report generator. |
591 """ | 668 """ |
592 return self._GetAttributeContainers('analysis_report') | 669 return self._GetAttributeContainers(self._CONTAINER_TYPE_ANALYSIS_REPORT) |
593 | 670 |
594 def GetErrors(self): | 671 def GetErrors(self): |
595 """Retrieves the errors. | 672 """Retrieves the errors. |
596 | 673 |
597 Returns: | 674 Returns: |
598 generator(ExtractionError): error generator. | 675 generator(ExtractionError): error generator. |
599 """ | 676 """ |
600 return self._GetAttributeContainers('extraction_error') | 677 return self._GetAttributeContainers(self._CONTAINER_TYPE_EXTRACTION_ERROR) |
601 | 678 |
602 def GetEvents(self): | 679 def GetEvents(self): |
603 """Retrieves the events. | 680 """Retrieves the events. |
604 | 681 |
605 Yields: | 682 Yields: |
606 EventObject: event. | 683 EventObject: event. |
607 """ | 684 """ |
608 for event in self._GetAttributeContainers('event'): | 685 for event in self._GetAttributeContainers(self._CONTAINER_TYPE_EVENT): |
609 if hasattr(event, 'event_data_row_identifier'): | 686 # TODO: refactor this into psort. |
610 event_data_identifier = identifiers.SQLTableIdentifier( | 687 self._ReadEventDataIntoEvent(event) |
611 'event_data', event.event_data_row_identifier) | |
612 event.SetEventDataIdentifier(event_data_identifier) | |
613 | |
614 del event.event_data_row_identifier | |
615 | 688 |
616 yield event | 689 yield event |
617 | 690 |
618 def GetEventData(self): | 691 def GetEventData(self): |
619 """Retrieves the event data. | 692 """Retrieves the event data. |
620 | 693 |
621 Yields: | 694 Yields: |
622 generator(EventData): event data generator. | 695 generator(EventData): event data generator. |
623 """ | 696 """ |
624 return self._GetAttributeContainers('event_data') | 697 return self._GetAttributeContainers(self._CONTAINER_TYPE_EVENT_DATA) |
625 | 698 |
626 def GetEventDataByIdentifier(self, identifier): | 699 def GetEventDataByIdentifier(self, identifier): |
627 """Retrieves specific event data. | 700 """Retrieves specific event data. |
628 | 701 |
629 Args: | 702 Args: |
630 identifier (SQLTableIdentifier): event data identifier. | 703 identifier (SQLTableIdentifier): event data identifier. |
631 | 704 |
632 Returns: | 705 Returns: |
633 EventData: event data or None if not available. | 706 EventData: event data or None if not available. |
634 """ | 707 """ |
635 return self._GetAttributeContainerByIndex( | 708 return self._GetAttributeContainerByIndex( |
636 'event_data', identifier.row_identifier - 1) | 709 self._CONTAINER_TYPE_EVENT_DATA, identifier.row_identifier - 1) |
637 | 710 |
638 def GetEventSourceByIndex(self, index): | 711 def GetEventSourceByIndex(self, index): |
639 """Retrieves a specific event source. | 712 """Retrieves a specific event source. |
640 | 713 |
641 Args: | 714 Args: |
642 index (int): event source index. | 715 index (int): event source index. |
643 | 716 |
644 Returns: | 717 Returns: |
645 EventSource: event source or None if not available. | 718 EventSource: event source or None if not available. |
646 """ | 719 """ |
647 return self._GetAttributeContainerByIndex('event_source', index) | 720 return self._GetAttributeContainerByIndex( |
721 self._CONTAINER_TYPE_EVENT_SOURCE, index) | |
648 | 722 |
649 def GetEventSources(self): | 723 def GetEventSources(self): |
650 """Retrieves the event sources. | 724 """Retrieves the event sources. |
651 | 725 |
652 Yields: | 726 Yields: |
653 generator(EventSource): event source generator. | 727 generator(EventSource): event source generator. |
654 """ | 728 """ |
655 return self._GetAttributeContainers('event_source') | 729 return self._GetAttributeContainers(self._CONTAINER_TYPE_EVENT_SOURCE) |
656 | 730 |
657 def GetEventTagByIdentifier(self, identifier): | 731 def GetEventTagByIdentifier(self, identifier): |
658 """Retrieves a specific event tag. | 732 """Retrieves a specific event tag. |
659 | 733 |
660 Args: | 734 Args: |
661 identifier (SQLTableIdentifier): event tag identifier. | 735 identifier (SQLTableIdentifier): event tag identifier. |
662 | 736 |
663 Returns: | 737 Returns: |
664 EventTag: event tag or None if not available. | 738 EventTag: event tag or None if not available. |
665 """ | 739 """ |
666 event_tag = self._GetAttributeContainerByIndex( | 740 event_tag = self._GetAttributeContainerByIndex( |
667 'event_tag', identifier.row_identifier - 1) | 741 self._CONTAINER_TYPE_EVENT_TAG, identifier.row_identifier - 1) |
668 if event_tag: | 742 if event_tag: |
669 event_identifier = identifiers.SQLTableIdentifier( | 743 event_identifier = identifiers.SQLTableIdentifier( |
670 'event', event_tag.event_row_identifier) | 744 self._CONTAINER_TYPE_EVENT, event_tag.event_row_identifier) |
671 event_tag.SetEventIdentifier(event_identifier) | 745 event_tag.SetEventIdentifier(event_identifier) |
672 | 746 |
673 del event_tag.event_row_identifier | 747 del event_tag.event_row_identifier |
674 | 748 |
675 return event_tag | 749 return event_tag |
676 | 750 |
677 def GetEventTags(self): | 751 def GetEventTags(self): |
678 """Retrieves the event tags. | 752 """Retrieves the event tags. |
679 | 753 |
680 Yields: | 754 Yields: |
681 EventTag: event tag. | 755 EventTag: event tag. |
682 """ | 756 """ |
683 for event_tag in self._GetAttributeContainers('event_tag'): | 757 for event_tag in self._GetAttributeContainers( |
758 self._CONTAINER_TYPE_EVENT_TAG): | |
684 event_identifier = identifiers.SQLTableIdentifier( | 759 event_identifier = identifiers.SQLTableIdentifier( |
685 'event', event_tag.event_row_identifier) | 760 self._CONTAINER_TYPE_EVENT, event_tag.event_row_identifier) |
686 event_tag.SetEventIdentifier(event_identifier) | 761 event_tag.SetEventIdentifier(event_identifier) |
687 | 762 |
688 del event_tag.event_row_identifier | 763 del event_tag.event_row_identifier |
689 | 764 |
690 yield event_tag | 765 yield event_tag |
691 | 766 |
692 def GetNumberOfAnalysisReports(self): | 767 def GetNumberOfAnalysisReports(self): |
693 """Retrieves the number of analysis reports. | 768 """Retrieves the number of analysis reports. |
694 | 769 |
695 Returns: | 770 Returns: |
696 int: number of analysis reports. | 771 int: number of analysis reports. |
697 """ | 772 """ |
698 if not self._HasTable('analysis_reports'): | 773 if not self._HasTable(self._CONTAINER_TYPE_ANALYSIS_REPORT): |
699 return 0 | 774 return 0 |
700 | 775 |
701 query = 'SELECT COUNT(*) FROM analysis_reports' | 776 query = 'SELECT COUNT(*) FROM {0:s}'.format( |
777 self._CONTAINER_TYPE_ANALYSIS_REPORT) | |
702 self._cursor.execute(query) | 778 self._cursor.execute(query) |
703 | 779 |
704 row = self._cursor.fetchone() | 780 row = self._cursor.fetchone() |
705 return row[0] | 781 return row[0] |
706 | 782 |
707 def GetNumberOfEventSources(self): | 783 def GetNumberOfEventSources(self): |
 708 """Retrieves the number of event sources. | 784 """Retrieves the number of event sources. |
709 | 785 |
710 Returns: | 786 Returns: |
711 int: number of event sources. | 787 int: number of event sources. |
712 """ | 788 """ |
713 if not self._HasTable('event_source'): | 789 if not self._HasTable(self._CONTAINER_TYPE_EVENT_SOURCE): |
714 return 0 | 790 return 0 |
715 | 791 |
716 query = 'SELECT COUNT(*) FROM event_source' | 792 query = 'SELECT COUNT(*) FROM {0:s}'.format( |
793 self._CONTAINER_TYPE_EVENT_SOURCE) | |
717 self._cursor.execute(query) | 794 self._cursor.execute(query) |
718 | 795 |
719 row = self._cursor.fetchone() | 796 row = self._cursor.fetchone() |
720 number_of_event_sources = row[0] | 797 number_of_event_sources = row[0] |
721 | 798 |
722 number_of_event_sources += self._GetNumberOfSerializedAttributeContainers( | 799 number_of_event_sources += self._GetNumberOfSerializedAttributeContainers( |
723 'event_sources') | 800 self._CONTAINER_TYPE_EVENT_SOURCE) |
724 return number_of_event_sources | 801 return number_of_event_sources |
725 | 802 |
726 def GetSessions(self): | 803 def GetSessions(self): |
727 """Retrieves the sessions. | 804 """Retrieves the sessions. |
728 | 805 |
729 Yields: | 806 Yields: |
730 Session: session attribute container. | 807 Session: session attribute container. |
731 | 808 |
732 Raises: | 809 Raises: |
733 IOError: if a stream is missing or there is a mismatch in session | 810 IOError: if a stream is missing or there is a mismatch in session |
734 identifiers between the session start and completion attribute | 811 identifiers between the session start and completion attribute |
735 containers. | 812 containers. |
736 """ | 813 """ |
737 session_start_generator = self._GetAttributeContainers('session_start') | 814 session_start_generator = self._GetAttributeContainers( |
815 self._CONTAINER_TYPE_SESSION_START) | |
738 session_completion_generator = self._GetAttributeContainers( | 816 session_completion_generator = self._GetAttributeContainers( |
739 'session_completion') | 817 self._CONTAINER_TYPE_SESSION_COMPLETION) |
740 | 818 |
741 for session_index in range(0, self._last_session): | 819 for session_index in range(0, self._last_session): |
742 session_start = next(session_start_generator) | 820 session_start = next(session_start_generator) |
743 session_completion = next(session_completion_generator) | 821 session_completion = next(session_completion_generator) |
744 | 822 |
745 session = sessions.Session() | 823 session = sessions.Session() |
746 session.CopyAttributesFromSessionStart(session_start) | 824 session.CopyAttributesFromSessionStart(session_start) |
747 if session_completion: | 825 if session_completion: |
748 try: | 826 try: |
749 session.CopyAttributesFromSessionCompletion(session_completion) | 827 session.CopyAttributesFromSessionCompletion(session_completion) |
(...skipping 22 matching lines...) Expand all Loading... | |
772 filter_expression.append( | 850 filter_expression.append( |
773 '_timestamp >= {0:d}'.format(time_range.start_timestamp)) | 851 '_timestamp >= {0:d}'.format(time_range.start_timestamp)) |
774 | 852 |
775 if time_range.end_timestamp: | 853 if time_range.end_timestamp: |
776 filter_expression.append( | 854 filter_expression.append( |
777 '_timestamp <= {0:d}'.format(time_range.end_timestamp)) | 855 '_timestamp <= {0:d}'.format(time_range.end_timestamp)) |
778 | 856 |
779 filter_expression = ' AND '.join(filter_expression) | 857 filter_expression = ' AND '.join(filter_expression) |
780 | 858 |
781 event_generator = self._GetAttributeContainers( | 859 event_generator = self._GetAttributeContainers( |
782 'event', filter_expression=filter_expression, order_by='_timestamp') | 860 self._CONTAINER_TYPE_EVENT, filter_expression=filter_expression, |
861 order_by='_timestamp') | |
783 | 862 |
784 for event in event_generator: | 863 for event in event_generator: |
785 if hasattr(event, 'event_data_row_identifier'): | 864 # TODO: refactor this into psort. |
786 event_data_identifier = identifiers.SQLTableIdentifier( | 865 self._ReadEventDataIntoEvent(event) |
787 'event_data', event.event_data_row_identifier) | |
788 event.SetEventDataIdentifier(event_data_identifier) | |
789 | |
790 del event.event_data_row_identifier | |
791 | 866 |
792 yield event | 867 yield event |
793 | 868 |
794 def HasAnalysisReports(self): | 869 def HasAnalysisReports(self): |
795 """Determines if a store contains analysis reports. | 870 """Determines if a store contains analysis reports. |
796 | 871 |
797 Returns: | 872 Returns: |
798 bool: True if the store contains analysis reports. | 873 bool: True if the store contains analysis reports. |
799 """ | 874 """ |
800 query = 'SELECT COUNT(*) FROM analysis_report' | 875 return self._HasAttributeContainers(self._CONTAINER_TYPE_ANALYSIS_REPORT) |
801 self._cursor.execute(query) | |
802 | |
803 row = self._cursor.fetchone() | |
804 return row and row[0] != 0 | |
805 | 876 |
806 def HasErrors(self): | 877 def HasErrors(self): |
807 """Determines if a store contains extraction errors. | 878 """Determines if a store contains extraction errors. |
808 | 879 |
809 Returns: | 880 Returns: |
810 bool: True if the store contains extraction errors. | 881 bool: True if the store contains extraction errors. |
811 """ | 882 """ |
812 query = 'SELECT COUNT(*) FROM extraction_error' | 883 return self._HasAttributeContainers(self._CONTAINER_TYPE_EXTRACTION_ERROR) |
813 self._cursor.execute(query) | |
814 | |
815 row = self._cursor.fetchone() | |
816 return row and row[0] != 0 | |
817 | 884 |
818 def HasEventTags(self): | 885 def HasEventTags(self): |
819 """Determines if a store contains event tags. | 886 """Determines if a store contains event tags. |
820 | 887 |
821 Returns: | 888 Returns: |
822 bool: True if the store contains event tags. | 889 bool: True if the store contains event tags. |
823 """ | 890 """ |
824 query = 'SELECT COUNT(*) FROM event_tags' | 891 return self._HasAttributeContainers(self._CONTAINER_TYPE_EVENT_TAG) |
825 self._cursor.execute(query) | |
826 | |
827 row = self._cursor.fetchone() | |
828 return row and row[0] != 0 | |
829 | 892 |
830 # pylint: disable=arguments-differ | 893 # pylint: disable=arguments-differ |
831 def Open(self, path=None, read_only=True, **unused_kwargs): | 894 def Open(self, path=None, read_only=True, **unused_kwargs): |
832 """Opens the storage. | 895 """Opens the storage. |
833 | 896 |
834 Args: | 897 Args: |
835 path (Optional[str]): path to the storage file. | 898 path (Optional[str]): path to the storage file. |
836 read_only (Optional[bool]): True if the file should be opened in | 899 read_only (Optional[bool]): True if the file should be opened in |
837 read-only mode. | 900 read-only mode. |
838 | 901 |
(...skipping 15 matching lines...) Expand all Loading... | |
854 | 917 |
855 cursor = connection.cursor() | 918 cursor = connection.cursor() |
856 if not cursor: | 919 if not cursor: |
857 return False | 920 return False |
858 | 921 |
859 self._connection = connection | 922 self._connection = connection |
860 self._cursor = cursor | 923 self._cursor = cursor |
861 self._is_open = True | 924 self._is_open = True |
862 self._read_only = read_only | 925 self._read_only = read_only |
863 | 926 |
864 if not read_only: | 927 if read_only: |
928 self._ReadStorageMetadata() | |
929 else: | |
865 # self._cursor.execute('PRAGMA journal_mode=MEMORY') | 930 # self._cursor.execute('PRAGMA journal_mode=MEMORY') |
866 | 931 |
867 # Turn off insert transaction integrity since we want to do bulk insert. | 932 # Turn off insert transaction integrity since we want to do bulk insert. |
868 self._cursor.execute('PRAGMA synchronous=OFF') | 933 self._cursor.execute('PRAGMA synchronous=OFF') |
869 | 934 |
870 if not self._HasTable('metadata'): | 935 if not self._HasTable('metadata'): |
871 self._WriteStorageMetadata() | 936 self._WriteStorageMetadata() |
872 else: | 937 else: |
873 self._ReadStorageMetadata() | 938 self._ReadStorageMetadata() |
874 | 939 |
875 if self.compression_format == definitions.COMPRESSION_FORMAT_ZLIB: | 940 if self.compression_format == definitions.COMPRESSION_FORMAT_ZLIB: |
876 data_column_type = 'BLOB' | 941 data_column_type = 'BLOB' |
877 else: | 942 else: |
878 data_column_type = 'TEXT' | 943 data_column_type = 'TEXT' |
879 | 944 |
880 for container_type in self._CONTAINER_TYPES: | 945 for container_type in self._CONTAINER_TYPES: |
881 if not self._HasTable(container_type): | 946 if not self._HasTable(container_type): |
882 if container_type == 'event': | 947 if container_type == self._CONTAINER_TYPE_EVENT: |
883 query = self._CREATE_EVENT_TABLE_QUERY.format( | 948 query = self._CREATE_EVENT_TABLE_QUERY.format( |
884 container_type, data_column_type) | 949 container_type, data_column_type) |
885 else: | 950 else: |
886 query = self._CREATE_TABLE_QUERY.format( | 951 query = self._CREATE_TABLE_QUERY.format( |
887 container_type, data_column_type) | 952 container_type, data_column_type) |
888 self._cursor.execute(query) | 953 self._cursor.execute(query) |
889 | 954 |
890 self._connection.commit() | 955 self._connection.commit() |
891 | 956 |
892 last_session_start = 0 | 957 last_session_start = 0 |
893 if self._HasTable('session_start'): | 958 if self._HasTable(self._CONTAINER_TYPE_SESSION_START): |
894 query = 'SELECT COUNT(*) FROM session_start' | 959 query = 'SELECT COUNT(*) FROM {0:s}'.format( |
960 self._CONTAINER_TYPE_SESSION_START) | |
895 self._cursor.execute(query) | 961 self._cursor.execute(query) |
896 row = self._cursor.fetchone() | 962 row = self._cursor.fetchone() |
897 last_session_start = row[0] | 963 last_session_start = row[0] |
898 | 964 |
899 last_session_completion = 0 | 965 last_session_completion = 0 |
900 if self._HasTable('session_completion'): | 966 if self._HasTable(self._CONTAINER_TYPE_SESSION_COMPLETION): |
901 query = 'SELECT COUNT(*) FROM session_completion' | 967 query = 'SELECT COUNT(*) FROM {0:s}'.format( |
968 self._CONTAINER_TYPE_SESSION_COMPLETION) | |
902 self._cursor.execute(query) | 969 self._cursor.execute(query) |
903 row = self._cursor.fetchone() | 970 row = self._cursor.fetchone() |
904 last_session_completion = row[0] | 971 last_session_completion = row[0] |
905 | 972 |
906 # TODO: handle open sessions. | 973 # TODO: handle open sessions. |
907 if last_session_start != last_session_completion: | 974 if last_session_start != last_session_completion: |
908 logging.warning('Detected unclosed session.') | 975 logging.warning('Detected unclosed session.') |
909 | 976 |
910 self._last_session = last_session_completion | 977 self._last_session = last_session_completion |
911 | 978 |
912 def ReadPreprocessingInformation(self, knowledge_base): | 979 def ReadPreprocessingInformation(self, knowledge_base): |
913 """Reads preprocessing information. | 980 """Reads preprocessing information. |
914 | 981 |
915 The preprocessing information contains the system configuration which | 982 The preprocessing information contains the system configuration which |
916 contains information about various system specific configuration data, | 983 contains information about various system specific configuration data, |
917 for example the user accounts. | 984 for example the user accounts. |
918 | 985 |
919 Args: | 986 Args: |
920 knowledge_base (KnowledgeBase): is used to store the preprocessing | 987 knowledge_base (KnowledgeBase): is used to store the preprocessing |
921 information. | 988 information. |
922 """ | 989 """ |
923 generator = self._GetAttributeContainers('system_configuration') | 990 generator = self._GetAttributeContainers( |
991 self._CONTAINER_TYPE_SYSTEM_CONFIGURATION) | |
924 for stream_number, system_configuration in enumerate(generator): | 992 for stream_number, system_configuration in enumerate(generator): |
925 # TODO: replace stream_number by session_identifier. | 993 # TODO: replace stream_number by session_identifier. |
926 knowledge_base.ReadSystemConfigurationArtifact( | 994 knowledge_base.ReadSystemConfigurationArtifact( |
927 system_configuration, session_identifier=stream_number) | 995 system_configuration, session_identifier=stream_number) |
928 | 996 |
929 def WritePreprocessingInformation(self, knowledge_base): | 997 def WritePreprocessingInformation(self, knowledge_base): |
930 """Writes preprocessing information. | 998 """Writes preprocessing information. |
931 | 999 |
932 Args: | 1000 Args: |
933 knowledge_base (KnowledgeBase): contains the preprocessing information. | 1001 knowledge_base (KnowledgeBase): contains the preprocessing information. |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
989 | 1057 |
990 Args: | 1058 Args: |
991 task_start (TaskStart): task start information. | 1059 task_start (TaskStart): task start information. |
992 | 1060 |
993 Raises: | 1061 Raises: |
994 IOError: when the storage file is closed or read-only. | 1062 IOError: when the storage file is closed or read-only. |
995 """ | 1063 """ |
996 self._RaiseIfNotWritable() | 1064 self._RaiseIfNotWritable() |
997 | 1065 |
998 self._WriteAttributeContainer(task_start) | 1066 self._WriteAttributeContainer(task_start) |
LEFT | RIGHT |