OLD | NEW |
| (Empty) |
1 # -*- coding: utf-8 -*- | |
2 """The fake storage intended for testing.""" | |
3 | |
4 from __future__ import unicode_literals | |
5 | |
6 import copy | |
7 | |
8 from plaso.lib import definitions | |
9 from plaso.storage import event_heaps | |
10 from plaso.storage import identifiers | |
11 from plaso.storage import interface | |
12 | |
13 | |
class FakeStorageWriter(interface.StorageWriter):
  """Fake storage writer object.

  The fake storage writer keeps all attribute containers in memory and is
  intended for testing only.

  Attributes:
    analysis_reports (list[AnalysisReport]): analysis reports.
    session_completion (SessionCompletion): session completion attribute
        container.
    session_start (SessionStart): session start attribute container.
    task_completion (TaskCompletion): task completion attribute container.
    task_start (TaskStart): task start attribute container.
  """

  def __init__(
      self, session, storage_type=definitions.STORAGE_TYPE_SESSION, task=None):
    """Initializes a storage writer object.

    Args:
      session (Session): session the storage changes are part of.
      storage_type (Optional[str]): storage type.
      task (Optional[Task]): task.
    """
    super(FakeStorageWriter, self).__init__(
        session, storage_type=storage_type, task=task)
    self._errors = []
    # Event data is keyed by the string representation of its identifier so
    # events can look their data back up via GetEventDataIdentifier().
    self._event_data = {}
    self._event_sources = []
    self._event_tags = []
    self._events = []
    self._is_open = False
    self._task_storage_writers = {}
    self.analysis_reports = []
    self.session_completion = None
    self.session_start = None
    self.task_completion = None
    self.task_start = None

  def _PrepareAttributeContainer(self, attribute_container):
    """Prepares an attribute container for storage.

    Args:
      attribute_container (AttributeContainer): attribute container.

    Returns:
      AttributeContainer: copy of the attribute container to store in
          the fake storage.
    """
    attribute_values_hash = hash(attribute_container.GetAttributeValuesString())
    identifier = identifiers.FakeIdentifier(attribute_values_hash)
    attribute_container.SetIdentifier(identifier)

    # Make sure the fake storage preserves the state of the attribute
    # container at the time it was added.
    return copy.deepcopy(attribute_container)

  def _RaiseIfNotWritable(self):
    """Raises if the storage writer is not writable.

    Raises:
      IOError: when the storage writer is closed.
    """
    if not self._is_open:
      raise IOError('Unable to write to closed storage writer.')

  def _ReadEventDataIntoEvent(self, event):
    """Reads the data into the event.

    This function is intended to offer backwards compatible event behavior.

    Args:
      event (EventObject): event.
    """
    if self._storage_type != definitions.STORAGE_TYPE_SESSION:
      return

    event_data_identifier = event.GetEventDataIdentifier()
    if event_data_identifier:
      lookup_key = event_data_identifier.CopyToString()
      event_data = self._event_data[lookup_key]

      # Copy the event data attributes onto the event itself for backwards
      # compatibility with code that expects "fat" events.
      for attribute_name, attribute_value in event_data.GetAttributes():
        setattr(event, attribute_name, attribute_value)

  def AddAnalysisReport(self, analysis_report):
    """Adds an analysis report.

    Args:
      analysis_report (AnalysisReport): analysis report.

    Raises:
      IOError: when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    analysis_report = self._PrepareAttributeContainer(analysis_report)

    self.analysis_reports.append(analysis_report)

  def AddError(self, error):
    """Adds an error.

    Args:
      error (ExtractionError): error.

    Raises:
      IOError: when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    error = self._PrepareAttributeContainer(error)

    self._errors.append(error)
    self.number_of_errors += 1

  def AddEvent(self, event):
    """Adds an event.

    Args:
      event (EventObject): event.

    Raises:
      IOError: when the storage writer is closed or
          if the event data identifier type is not supported.
    """
    self._RaiseIfNotWritable()

    # TODO: change to no longer allow event_data_identifier is None
    # after refactoring every parser to generate event data.
    event_data_identifier = event.GetEventDataIdentifier()
    if event_data_identifier:
      if not isinstance(event_data_identifier, identifiers.FakeIdentifier):
        # Note: use !s since type objects do not support the :s format
        # specifier on Python 3.
        raise IOError('Unsupported event data identifier type: {0!s}'.format(
            type(event_data_identifier)))

    event = self._PrepareAttributeContainer(event)

    self._events.append(event)
    self.number_of_events += 1

  def AddEventData(self, event_data):
    """Adds event data.

    Args:
      event_data (EventData): event data.

    Raises:
      IOError: when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    event_data = self._PrepareAttributeContainer(event_data)

    identifier = event_data.GetIdentifier()
    lookup_key = identifier.CopyToString()
    self._event_data[lookup_key] = event_data

  def AddEventSource(self, event_source):
    """Adds an event source.

    Args:
      event_source (EventSource): event source.

    Raises:
      IOError: when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    event_source = self._PrepareAttributeContainer(event_source)

    self._event_sources.append(event_source)
    self.number_of_event_sources += 1

  def AddEventTag(self, event_tag):
    """Adds an event tag.

    Args:
      event_tag (EventTag): event tag.

    Raises:
      IOError: when the storage writer is closed or
          if the event identifier type is not supported.
    """
    self._RaiseIfNotWritable()

    event_identifier = event_tag.GetEventIdentifier()
    if not isinstance(event_identifier, identifiers.FakeIdentifier):
      # Note: use !s since type objects do not support the :s format
      # specifier on Python 3.
      raise IOError('Unsupported event identifier type: {0!s}'.format(
          type(event_identifier)))

    event_tag = self._PrepareAttributeContainer(event_tag)

    self._event_tags.append(event_tag)
    self.number_of_event_tags += 1

  def CreateTaskStorage(self, task):
    """Creates a task storage.

    Args:
      task (Task): task.

    Returns:
      FakeStorageWriter: storage writer.

    Raises:
      IOError: if the task storage already exists.
    """
    if task.identifier in self._task_storage_writers:
      raise IOError('Storage writer for task: {0:s} already exists.'.format(
          task.identifier))

    storage_writer = FakeStorageWriter(
        self._session, storage_type=definitions.STORAGE_TYPE_TASK, task=task)
    self._task_storage_writers[task.identifier] = storage_writer
    return storage_writer

  def Close(self):
    """Closes the storage writer.

    Raises:
      IOError: when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    self._is_open = False

  def GetErrors(self):
    """Retrieves the errors.

    Returns:
      generator(ExtractionError): error generator.
    """
    return iter(self._errors)

  def GetEvents(self):
    """Retrieves the events.

    Yields:
      EventObject: event.
    """
    for event in self._events:
      # TODO: refactor this into psort.
      self._ReadEventDataIntoEvent(event)

      yield event

  def GetEventData(self):
    """Retrieves the event data.

    Returns:
      generator(EventData): event data generator.
    """
    return iter(self._event_data.values())

  def GetEventSources(self):
    """Retrieves the event sources.

    Returns:
      generator(EventSource): event source generator.
    """
    return iter(self._event_sources)

  def GetEventTags(self):
    """Retrieves the event tags.

    Returns:
      generator(EventTag): event tag generator.
    """
    return iter(self._event_tags)

  def GetFirstWrittenEventSource(self):
    """Retrieves the first event source that was written after open.

    Using GetFirstWrittenEventSource and GetNextWrittenEventSource newly
    added event sources can be retrieved in order of addition.

    Returns:
      EventSource: event source or None if there are no newly written ones.

    Raises:
      IOError: when the storage writer is closed.
    """
    if not self._is_open:
      raise IOError('Unable to read from closed storage writer.')

    # Check the first written index, not the "next" cursor, otherwise this
    # method incorrectly returns None after GetNextWrittenEventSource has
    # iterated to the end.
    if self._first_written_event_source_index >= len(self._event_sources):
      return

    event_source = self._event_sources[self._first_written_event_source_index]
    # Reset the "next" cursor so iteration restarts after the first source.
    self._written_event_source_index = (
        self._first_written_event_source_index + 1)
    return event_source

  def GetNextWrittenEventSource(self):
    """Retrieves the next event source that was written after open.

    Returns:
      EventSource: event source or None if there are no newly written ones.

    Raises:
      IOError: when the storage writer is closed.
    """
    if not self._is_open:
      raise IOError('Unable to read from closed storage writer.')

    if self._written_event_source_index >= len(self._event_sources):
      return

    event_source = self._event_sources[self._written_event_source_index]
    self._written_event_source_index += 1
    return event_source

  def GetSortedEvents(self, time_range=None):
    """Retrieves the events in increasing chronological order.

    Args:
      time_range (Optional[TimeRange]): time range used to filter events
          that fall in a specific period.

    Returns:
      generator(EventObject): event generator.

    Raises:
      IOError: when the storage writer is closed.
    """
    if not self._is_open:
      raise IOError('Unable to read from closed storage writer.')

    event_heap = event_heaps.EventHeap()

    for event in self._events:
      if (time_range and (
          event.timestamp < time_range.start_timestamp or
          event.timestamp > time_range.end_timestamp)):
        continue

      # Make a copy of the event before adding the event data so the stored
      # event is not modified.
      event = copy.deepcopy(event)
      # TODO: refactor this into psort.
      self._ReadEventDataIntoEvent(event)

      event_heap.PushEvent(event)

    return iter(event_heap.PopEvents())

  def Open(self):
    """Opens the storage writer.

    Raises:
      IOError: if the storage writer is already opened.
    """
    if self._is_open:
      raise IOError('Storage writer already opened.')

    self._is_open = True

    # Remember where the newly written event sources start so they can be
    # retrieved in order of addition.
    self._first_written_event_source_index = len(self._event_sources)
    self._written_event_source_index = self._first_written_event_source_index

  def PrepareMergeTaskStorage(self, task):
    """Prepares a task storage for merging.

    Args:
      task (Task): task.

    Raises:
      IOError: if the task storage does not exist.
    """
    if task.identifier not in self._task_storage_writers:
      raise IOError('Storage writer for task: {0:s} does not exist.'.format(
          task.identifier))

  def ReadPreprocessingInformation(self, unused_knowledge_base):
    """Reads preprocessing information.

    The preprocessing information contains the system configuration which
    contains information about various system specific configuration data,
    for example the user accounts.

    Args:
      knowledge_base (KnowledgeBase): is used to store the preprocessing
          information.

    Raises:
      IOError: if the storage type does not support writing preprocessing
          information or when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    if self._storage_type != definitions.STORAGE_TYPE_SESSION:
      raise IOError('Preprocessing information not supported by storage type.')

    # TODO: implement.

  def SetSerializersProfiler(self, serializers_profiler):
    """Sets the serializers profiler.

    The fake storage does not serialize, hence profiling is a no-op.

    Args:
      serializers_profiler (SerializersProfiler): serializers profile.
    """
    pass

  def WritePreprocessingInformation(self, unused_knowledge_base):
    """Writes preprocessing information.

    Args:
      knowledge_base (KnowledgeBase): contains the preprocessing information.

    Raises:
      IOError: if the storage type does not support writing preprocessing
          information or when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    if self._storage_type != definitions.STORAGE_TYPE_SESSION:
      raise IOError('Preprocessing information not supported by storage type.')

    # TODO: implement.

  def WriteSessionCompletion(self, aborted=False):
    """Writes session completion information.

    Args:
      aborted (Optional[bool]): True if the session was aborted.

    Raises:
      IOError: if the storage type does not support writing a session
          completion or when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    if self._storage_type != definitions.STORAGE_TYPE_SESSION:
      raise IOError('Session completion not supported by storage type.')

    self._session.aborted = aborted
    self.session_completion = self._session.CreateSessionCompletion()

  def WriteSessionStart(self):
    """Writes session start information.

    Raises:
      IOError: if the storage type does not support writing a session
          start or when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    if self._storage_type != definitions.STORAGE_TYPE_SESSION:
      raise IOError('Session start not supported by storage type.')

    self.session_start = self._session.CreateSessionStart()

  def WriteTaskCompletion(self, aborted=False):
    """Writes task completion information.

    Args:
      aborted (Optional[bool]): True if the session was aborted.

    Raises:
      IOError: if the storage type does not support writing a task
          completion or when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    if self._storage_type != definitions.STORAGE_TYPE_TASK:
      raise IOError('Task completion not supported by storage type.')

    self._task.aborted = aborted
    self.task_completion = self._task.CreateTaskCompletion()

  def WriteTaskStart(self):
    """Writes task start information.

    Raises:
      IOError: if the storage type does not support writing a task
          start or when the storage writer is closed.
    """
    self._RaiseIfNotWritable()

    if self._storage_type != definitions.STORAGE_TYPE_TASK:
      raise IOError('Task start not supported by storage type.')

    self.task_start = self._task.CreateTaskStart()
OLD | NEW |