Left: | ||
Right: |
OLD | NEW |
---|---|
1 # -*- coding: utf-8 -*- | 1 # -*- coding: utf-8 -*- |
2 """The event extraction worker.""" | 2 """The event extraction worker.""" |
3 | 3 |
4 from __future__ import unicode_literals | 4 from __future__ import unicode_literals |
5 | 5 |
6 import copy | 6 import copy |
7 import logging | 7 import logging |
8 import os | 8 import os |
9 import re | 9 import re |
10 import time | 10 import time |
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
63 '/$BadBlockFile', | 63 '/$BadBlockFile', |
64 '/$AllocationFile', | 64 '/$AllocationFile', |
65 '/$AttributesFile', | 65 '/$AttributesFile', |
66 ]) | 66 ]) |
67 | 67 |
68 # TODO: make this filtering solution more generic. Also see: | 68 # TODO: make this filtering solution more generic. Also see: |
69 # https://github.com/log2timeline/plaso/issues/467 | 69 # https://github.com/log2timeline/plaso/issues/467 |
70 _CHROME_CACHE_DATA_FILE_RE = re.compile(r'^[fF]_[0-9a-fA-F]{6}$') | 70 _CHROME_CACHE_DATA_FILE_RE = re.compile(r'^[fF]_[0-9a-fA-F]{6}$') |
71 _FIREFOX_CACHE_DATA_FILE_RE = re.compile(r'^[0-9a-fA-F]{5}[dm][0-9]{2}$') | 71 _FIREFOX_CACHE_DATA_FILE_RE = re.compile(r'^[0-9a-fA-F]{5}[dm][0-9]{2}$') |
72 _FIREFOX_CACHE2_DATA_FILE_RE = re.compile(r'^[0-9a-fA-F]{40}$') | 72 _FIREFOX_CACHE2_DATA_FILE_RE = re.compile(r'^[0-9a-fA-F]{40}$') |
73 _FSEVENTSD_FILE_RE = re.compile(r'^[0-9a-fA-F]{16}$') | |
74 | 73 |
75 _TYPES_WITH_ROOT_METADATA = frozenset([ | 74 _TYPES_WITH_ROOT_METADATA = frozenset([ |
76 dfvfs_definitions.TYPE_INDICATOR_GZIP]) | 75 dfvfs_definitions.TYPE_INDICATOR_GZIP]) |
77 | 76 |
78 def __init__(self, parser_filter_expression=None): | 77 def __init__(self, parser_filter_expression=None): |
79 """Initializes an event extraction worker. | 78 """Initializes an event extraction worker. |
80 | 79 |
81 Args: | 80 Args: |
82 parser_filter_expression (Optional[str]): parser filter expression, | 81 parser_filter_expression (Optional[str]): parser filter expression, |
83 where None represents all parsers and plugins. | 82 where None represents all parsers and plugins. |
(...skipping 192 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
276 location = file_system.JoinPath(location_segments) | 275 location = file_system.JoinPath(location_segments) |
277 index_path_spec = path_spec_factory.Factory.NewPathSpec( | 276 index_path_spec = path_spec_factory.Factory.NewPathSpec( |
278 file_entry.type_indicator, location=location, | 277 file_entry.type_indicator, location=location, |
279 parent=file_entry.path_spec.parent) | 278 parent=file_entry.path_spec.parent) |
280 | 279 |
281 if file_system.FileEntryExistsByPathSpec(index_path_spec): | 280 if file_system.FileEntryExistsByPathSpec(index_path_spec): |
282 # TODO: improve this check if "index" is a Firefox Cache version 2 | 281 # TODO: improve this check if "index" is a Firefox Cache version 2 |
283 # index file. | 282 # index file. |
284 return True | 283 return True |
285 | 284 |
286 elif self._FSEVENTSD_FILE_RE.match(path_segments[-1]): | |
287 if len(path_segments) == 2 and path_segments[0].lower() == '.fseventsd': | |
Joachim Metz
2018/01/19 05:55:01
What if the fseventsd parser is disabled?
onager
2018/01/19 09:06:38
Then we treat them like any other file. The only r
Joachim Metz
2018/01/19 09:25:31
my concern is also does that affect the performance
onager
2018/01/19 10:39:23
Probably, but it's not clear how much effect this
Joachim Metz
2018/01/20 16:52:16
I opt we test this, but separate from this CL.
onager
2018/01/21 01:59:13
Acknowledged.
| |
288 return True | |
289 | |
290 elif len(path_segments) == 1 and path_segments[0].lower() in ( | 285 elif len(path_segments) == 1 and path_segments[0].lower() in ( |
291 'hiberfil.sys', 'pagefile.sys', 'swapfile.sys'): | 286 'hiberfil.sys', 'pagefile.sys', 'swapfile.sys'): |
292 return True | 287 return True |
293 | 288 |
294 return False | 289 return False |
295 | 290 |
296 def _ExtractContentFromDataStream( | 291 def _ExtractContentFromDataStream( |
297 self, mediator, file_entry, data_stream_name): | 292 self, mediator, file_entry, data_stream_name): |
298 """Extracts content from a data stream. | 293 """Extracts content from a data stream. |
299 | 294 |
(...skipping 525 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
825 """Sets the parsers profiler. | 820 """Sets the parsers profiler. |
826 | 821 |
827 Args: | 822 Args: |
828 processing_profiler (ProcessingProfiler): processing profile. | 823 processing_profiler (ProcessingProfiler): processing profile. |
829 """ | 824 """ |
830 self._processing_profiler = processing_profiler | 825 self._processing_profiler = processing_profiler |
831 | 826 |
832 def SignalAbort(self): | 827 def SignalAbort(self): |
833 """Signals the extraction worker to abort.""" | 828 """Signals the extraction worker to abort.""" |
834 self._abort = True | 829 self._abort = True |
OLD | NEW |