Left: | ||
Right: |
OLD | NEW |
---|---|
1 #!/usr/bin/python | |
2 # -*- coding: utf-8 -*- | 1 # -*- coding: utf-8 -*- |
3 """The log2timeline command line tool.""" | 2 """The log2timeline CLI tool.""" |
4 | 3 |
5 import argparse | 4 import argparse |
6 import logging | 5 import logging |
7 import multiprocessing | |
8 import os | 6 import os |
9 import sys | 7 import sys |
10 import time | 8 import time |
11 import textwrap | 9 import textwrap |
12 | 10 |
13 from dfvfs.lib import definitions as dfvfs_definitions | 11 from dfvfs.lib import definitions as dfvfs_definitions |
12 from dfvfs.resolver import context as dfvfs_context | |
14 | 13 |
15 from plaso import dependencies | 14 import plaso |
15 | |
16 # The following import makes sure the filters are registered. | |
17 from plaso import filters # pylint: disable=unused-import | |
18 | |
19 # The following import makes sure the output modules are registered. | |
20 from plaso import output # pylint: disable=unused-import | |
21 | |
16 from plaso.cli import extraction_tool | 22 from plaso.cli import extraction_tool |
17 from plaso.cli import tools as cli_tools | 23 from plaso.cli import tools as cli_tools |
18 from plaso.cli import views as cli_views | 24 from plaso.cli import views as cli_views |
19 from plaso.frontend import log2timeline | 25 from plaso.cli import views as logging_filter |
20 from plaso.engine import configurations | 26 from plaso.engine import configurations |
27 from plaso.engine import single_process as single_process_engine | |
28 from plaso.filters import manager as filters_manager | |
29 from plaso.frontend import utils as frontend_utils | |
21 from plaso.lib import errors | 30 from plaso.lib import errors |
22 from plaso.lib import pfilter | 31 from plaso.lib import pfilter |
32 from plaso.multi_processing import task_engine as multi_process_engine | |
33 from plaso.output import manager as output_manager | |
34 from plaso.storage import zip_file as storage_zip_file | |
23 | 35 |
24 | 36 |
25 class Log2TimelineTool(extraction_tool.ExtractionTool): | 37 class Log2TimelineTool(extraction_tool.ExtractionTool): |
26 """Class that implements the log2timeline CLI tool. | 38 """Class that implements the log2timeline CLI tool. |
27 | 39 |
28 Attributes: | 40 Attributes: |
29 dependencies_check (bool): True if the availability and versions of | 41 dependencies_check (bool): True if the availability and versions of |
30 dependencies should be checked. | 42 dependencies should be checked. |
31 list_output_modules (bool): True if information about the output modules | 43 list_output_modules (bool): True if information about the output modules |
32 should be shown. | 44 should be shown. |
(...skipping 21 matching lines...) Expand all Loading... | |
54 u' log2timeline.py /cases/mycase/storage.plaso ímynd.dd', | 66 u' log2timeline.py /cases/mycase/storage.plaso ímynd.dd', |
55 u'', | 67 u'', |
56 u'Instead of answering questions, indicate some of the options on the', | 68 u'Instead of answering questions, indicate some of the options on the', |
57 u'command line (including data from particular VSS stores).', | 69 u'command line (including data from particular VSS stores).', |
58 (u' log2timeline.py -o 63 --vss_stores 1,2 /cases/plaso_vss.plaso ' | 70 (u' log2timeline.py -o 63 --vss_stores 1,2 /cases/plaso_vss.plaso ' |
59 u'image.E01'), | 71 u'image.E01'), |
60 u'', | 72 u'', |
61 u'And that is how you build a timeline using log2timeline...', | 73 u'And that is how you build a timeline using log2timeline...', |
62 u''])) | 74 u''])) |
63 | 75 |
76 _FILTERS_URL = u'https://github.com/log2timeline/plaso/wiki/Filters' | |
77 | |
78 _SOURCE_TYPES_TO_PREPROCESS = frozenset([ | |
79 dfvfs_definitions.SOURCE_TYPE_DIRECTORY, | |
80 dfvfs_definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE, | |
81 dfvfs_definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE]) | |
82 | |
64 def __init__(self, input_reader=None, output_writer=None): | 83 def __init__(self, input_reader=None, output_writer=None): |
65 """Initializes the CLI tool object. | 84 """Initializes the CLI tool object. |
66 | 85 |
67 Args: | 86 Args: |
68 input_reader (Optional[InputReader]): input reader, where None indicates | 87 input_reader (Optional[InputReader]): input reader, where None indicates |
69 that the stdin input reader should be used. | 88 that the stdin input reader should be used. |
70 output_writer (Optional[OutputWriter]): output writer, where None | 89 output_writer (Optional[OutputWriter]): output writer, where None |
71 indicates that the stdout output writer should be used. | 90 indicates that the stdout output writer should be used. |
72 """ | 91 """ |
73 super(Log2TimelineTool, self).__init__( | 92 super(Log2TimelineTool, self).__init__( |
74 input_reader=input_reader, output_writer=output_writer) | 93 input_reader=input_reader, output_writer=output_writer) |
75 self._command_line_arguments = None | 94 self._command_line_arguments = None |
76 self._enable_sigsegv_handler = False | 95 self._enable_sigsegv_handler = False |
77 self._filter_expression = None | 96 self._filter_expression = None |
78 self._front_end = log2timeline.Log2TimelineFrontend() | |
79 self._number_of_extraction_workers = 0 | 97 self._number_of_extraction_workers = 0 |
80 self._output = None | 98 self._resolver_context = dfvfs_context.Context() |
81 self._source_type = None | 99 self._source_type = None |
82 self._source_type_string = u'UNKNOWN' | 100 self._source_type_string = u'UNKNOWN' |
83 self._status_view_mode = u'linear' | 101 self._status_view_mode = u'linear' |
84 self._stdout_output_writer = isinstance( | 102 self._stdout_output_writer = isinstance( |
85 self._output_writer, cli_tools.StdoutOutputWriter) | 103 self._output_writer, cli_tools.StdoutOutputWriter) |
104 self._storage_file_path = None | |
86 self._temporary_directory = None | 105 self._temporary_directory = None |
87 self._text_prepend = None | 106 self._text_prepend = None |
107 self._use_zeromq = True | |
88 self._worker_memory_limit = None | 108 self._worker_memory_limit = None |
89 | 109 |
90 self.dependencies_check = True | 110 self.dependencies_check = True |
91 self.list_output_modules = False | 111 self.list_output_modules = False |
92 self.show_info = False | 112 self.show_info = False |
93 | 113 |
114 def _CheckStorageFile(self, storage_file_path): | |
115 """Checks if the storage file path is valid. | |
116 | |
117 Args: | |
118 storage_file_path (str): path of the storage file. | |
119 | |
120 Raises: | |
121 BadConfigOption: if the storage file path is invalid. | |
122 """ | |
123 if os.path.exists(storage_file_path): | |
124 if not os.path.isfile(storage_file_path): | |
125 raise errors.BadConfigOption( | |
126 u'Storage file: {0:s} already exists and is not a file.'.format( | |
127 storage_file_path)) | |
128 logging.warning(u'Appending to an already existing storage file.') | |
129 | |
130 dirname = os.path.dirname(storage_file_path) | |
131 if not dirname: | |
132 dirname = u'.' | |
133 | |
134 # TODO: add a more thorough check to see if the storage file really is | |
135 # a plaso storage file. | |
136 | |
137 if not os.access(dirname, os.W_OK): | |
138 raise errors.BadConfigOption( | |
139 u'Unable to write to storage file: {0:s}'.format(storage_file_path)) | |
140 | |
141 def _CreateProcessingConfiguration(self): | |
142 """Creates a processing configuration. | |
143 | |
144 Returns: | |
145 ProcessingConfiguration: processing configuration. | |
146 """ | |
147 # TODO: pass preferred_encoding. | |
148 configuration = configurations.ProcessingConfiguration() | |
149 configuration.credentials = self._credential_configurations | |
150 configuration.debug_output = self._debug_mode | |
151 configuration.event_extraction.filter_object = self._filter_object | |
152 configuration.event_extraction.text_prepend = self._text_prepend | |
153 configuration.extraction.hasher_names_string = self._hasher_names_string | |
154 configuration.extraction.process_archives = self._process_archives | |
155 configuration.extraction.process_compressed_streams = ( | |
156 self._process_compressed_streams) | |
157 configuration.extraction.yara_rules_string = self._yara_rules_string | |
158 configuration.filter_file = self._filter_file | |
159 configuration.filter_object = self._filter_object | |
160 configuration.input_source.mount_path = self._mount_path | |
161 configuration.parser_filter_expression = self._parser_filter_expression | |
162 configuration.preferred_year = self._preferred_year | |
163 configuration.profiling.directory = self._profiling_directory | |
164 configuration.profiling.sample_rate = self._profiling_sample_rate | |
165 configuration.profiling.profilers = self._profilers | |
166 configuration.temporary_directory = self._temporary_directory | |
167 | |
168 return configuration | |
169 | |
94 def _DetermineSourceType(self): | 170 def _DetermineSourceType(self): |
95 """Determines the source type.""" | 171 """Determines the source type.""" |
96 scan_context = self.ScanSource() | 172 scan_context = self.ScanSource() |
97 self._source_type = scan_context.source_type | 173 self._source_type = scan_context.source_type |
98 | 174 |
99 if self._source_type == dfvfs_definitions.SOURCE_TYPE_DIRECTORY: | 175 if self._source_type == dfvfs_definitions.SOURCE_TYPE_DIRECTORY: |
100 self._source_type_string = u'directory' | 176 self._source_type_string = u'directory' |
101 | 177 |
102 elif self._source_type == dfvfs_definitions.SOURCE_TYPE_FILE: | 178 elif self._source_type == dfvfs_definitions.SOURCE_TYPE_FILE: |
103 self._source_type_string = u'single file' | 179 self._source_type_string = u'single file' |
104 | 180 |
105 elif self._source_type == ( | 181 elif self._source_type == ( |
106 dfvfs_definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE): | 182 dfvfs_definitions.SOURCE_TYPE_STORAGE_MEDIA_DEVICE): |
107 self._source_type_string = u'storage media device' | 183 self._source_type_string = u'storage media device' |
108 | 184 |
109 elif self._source_type == ( | 185 elif self._source_type == ( |
110 dfvfs_definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE): | 186 dfvfs_definitions.SOURCE_TYPE_STORAGE_MEDIA_IMAGE): |
111 self._source_type_string = u'storage media image' | 187 self._source_type_string = u'storage media image' |
112 | 188 |
113 else: | 189 else: |
114 self._source_type_string = u'UNKNOWN' | 190 self._source_type_string = u'UNKNOWN' |
115 | 191 |
116 def _GetMatcher(self, filter_expression): | 192 def _GetFiltersInformation(self): |
117 """Retrieves a filter object for a specific filter expression. | 193 """Retrieves the filters information. |
118 | |
119 Args: | |
120 filter_expression (str): filter expression. | |
121 | 194 |
122 Returns: | 195 Returns: |
123 A filter object (instance of objectfilter.TODO) or None. | 196 list[tuple[str, str]]: pairs of filter names and docstrings. |
124 """ | 197 """ |
125 try: | 198 filters_information = [] |
126 parser = pfilter.BaseParser(filter_expression).Parse() | 199 filter_objects = filters_manager.FiltersManager.GetFilterObjects() |
127 return parser.Compile(pfilter.PlasoAttributeFilterImplementation) | 200 for filter_object in sorted(filter_objects): |
201 # TODO: refactor to use DESCRIPTION instead of docstring. | |
202 doc_string, _, _ = filter_object.__doc__.partition(u'\n') | |
203 filters_information.append((filter_object.filter_name, doc_string)) | |
128 | 204 |
129 except errors.ParseError as exception: | 205 return filters_information |
130 logging.error( | 206 |
131 u'Unable to create filter: {0:s} with error: {1:s}'.format( | 207 def _GetOutputModulesInformation(self): |
132 filter_expression, exception)) | 208 """Retrieves the output modules information. |
209 | |
210 Returns: | |
211 list[tuple[str, str]]: pairs of output module names and descriptions. | |
212 """ | |
213 output_modules_information = [] | |
214 for name, output_class in output_manager.OutputManager.GetOutputClasses(): | |
215 output_modules_information.append((name, output_class.DESCRIPTION)) | |
216 | |
217 return output_modules_information | |
218 | |
219 def _GetPluginData(self): | |
220 """Retrieves the version and various plugin information. | |
221 | |
222 Returns: | |
223 dict[str, list[str]]: available parsers and plugins. | |
224 """ | |
225 return_dict = {} | |
226 | |
227 return_dict[u'Versions'] = [ | |
228 (u'plaso engine', plaso.__version__), | |
229 (u'python', sys.version)] | |
230 | |
231 hashers_information = self._hashers_manager.GetHashersInformation() | |
232 parsers_information = self._parsers_manager.GetParsersInformation() | |
233 plugins_information = self._parsers_manager.GetParserPluginsInformation() | |
234 presets_information = self._GetParserPresetsInformation() | |
235 output_modules_information = self._GetOutputModulesInformation() | |
236 | |
237 return_dict[u'Hashers'] = hashers_information | |
238 return_dict[u'Parsers'] = parsers_information | |
239 return_dict[u'Parser Plugins'] = plugins_information | |
240 return_dict[u'Parser Presets'] = presets_information | |
241 return_dict[u'Output Modules'] = output_modules_information | |
242 return_dict[u'Filters'] = self._GetFiltersInformation() | |
243 | |
244 return return_dict | |
133 | 245 |
134 def _GetStatusUpdateCallback(self): | 246 def _GetStatusUpdateCallback(self): |
135 """Retrieves the status update callback function. | 247 """Retrieves the status update callback function. |
136 | 248 |
137 Returns: | 249 Returns: |
138 function: status update callback function or None. | 250 function: status update callback function or None. |
139 """ | 251 """ |
140 if self._status_view_mode == u'linear': | 252 if self._status_view_mode == u'linear': |
141 return self._PrintStatusUpdateStream | 253 return self._PrintStatusUpdateStream |
142 elif self._status_view_mode == u'window': | 254 elif self._status_view_mode == u'window': |
143 return self._PrintStatusUpdate | 255 return self._PrintStatusUpdate |
144 | 256 |
257 def _ParseFilterOption(self, options): | |
258 """Parses the filter option. | |
259 | |
260 Args: | |
261 options (argparse.Namespace): command line arguments. | |
262 | |
263 Raises: | |
264 BadConfigOption: if the options are invalid. | |
265 """ | |
266 filter_expression = self.ParseStringOption(options, u'filter') | |
267 if not filter_expression: | |
268 return | |
269 | |
270 try: | |
271 parser = pfilter.BaseParser(filter_expression).Parse() | |
272 filter_object = parser.Compile(pfilter.PlasoAttributeFilterImplementation) | |
273 | |
274 except errors.ParseError as exception: | |
275 raise errors.BadConfigOption( | |
276 u'Unable to create filter: {0:s} with error: {1:s}'.format( | |
277 filter_expression, exception)) | |
278 | |
279 self._filter_expression = filter_expression | |
280 self._filter_object = filter_object | |
281 | |
145 def _ParseOutputOptions(self, options): | 282 def _ParseOutputOptions(self, options): |
146 """Parses the output options. | 283 """Parses the output options. |
147 | 284 |
148 Args: | 285 Args: |
149 options (argparse.Namespace): command line arguments. | 286 options (argparse.Namespace): command line arguments. |
150 | 287 |
151 Raises: | 288 Raises: |
152 BadConfigOption: if the options are invalid. | 289 BadConfigOption: if the options are invalid. |
153 """ | 290 """ |
154 self._output_module = self.ParseStringOption(options, u'output_module') | 291 self._output_module = self.ParseStringOption(options, u'output_module') |
155 if self._output_module == u'list': | 292 if self._output_module == u'list': |
156 self.list_output_modules = True | 293 self.list_output_modules = True |
157 | 294 |
158 self._text_prepend = self.ParseStringOption(options, u'text_prepend') | 295 self._text_prepend = self.ParseStringOption(options, u'text_prepend') |
159 | 296 |
160 def _ParseProcessingOptions(self, options): | 297 def _ParseProcessingOptions(self, options): |
161 """Parses the processing options. | 298 """Parses the processing options. |
162 | 299 |
163 Args: | 300 Args: |
164 options (argparse.Namespace): command line arguments. | 301 options (argparse.Namespace): command line arguments. |
165 | 302 |
166 Raises: | 303 Raises: |
167 BadConfigOption: if the options are invalid. | 304 BadConfigOption: if the options are invalid. |
168 """ | 305 """ |
169 use_zeromq = getattr(options, u'use_zeromq', True) | 306 self._use_zeromq = getattr(options, u'use_zeromq', True) |
170 self._front_end.SetUseZeroMQ(use_zeromq) | |
171 | 307 |
172 self._single_process_mode = getattr(options, u'single_process', False) | 308 self._single_process_mode = getattr(options, u'single_process', False) |
173 | 309 |
174 self._temporary_directory = getattr(options, u'temporary_directory', None) | 310 self._temporary_directory = getattr(options, u'temporary_directory', None) |
175 if (self._temporary_directory and | 311 if (self._temporary_directory and |
176 not os.path.isdir(self._temporary_directory)): | 312 not os.path.isdir(self._temporary_directory)): |
177 raise errors.BadConfigOption( | 313 raise errors.BadConfigOption( |
178 u'No such temporary directory: {0:s}'.format( | 314 u'No such temporary directory: {0:s}'.format( |
179 self._temporary_directory)) | 315 self._temporary_directory)) |
180 | 316 |
181 self._worker_memory_limit = getattr(options, u'worker_memory_limit', None) | 317 self._worker_memory_limit = getattr(options, u'worker_memory_limit', None) |
182 self._number_of_extraction_workers = getattr(options, u'workers', 0) | 318 self._number_of_extraction_workers = getattr(options, u'workers', 0) |
183 | 319 |
184 # TODO: add code to parse the worker options. | 320 # TODO: add code to parse the worker options. |
185 | 321 |
322 def _PrintProcessingSummary(self, processing_status): | |
323 """Prints a summary of the processing. | |
324 | |
325 Args: | |
326 processing_status (ProcessingStatus): processing status. | |
327 """ | |
328 if not processing_status: | |
329 self._output_writer.Write( | |
330 u'WARNING: missing processing status information.\n') | |
331 | |
332 elif not processing_status.aborted: | |
333 if processing_status.error_path_specs: | |
334 self._output_writer.Write(u'Processing completed with errors.\n') | |
335 else: | |
336 self._output_writer.Write(u'Processing completed.\n') | |
337 | |
338 number_of_errors = ( | |
339 processing_status.foreman_status.number_of_produced_errors) | |
340 if number_of_errors: | |
341 output_text = u'\n'.join([ | |
342 u'', | |
343 (u'Number of errors encountered while extracting events: ' | |
344 u'{0:d}.').format(number_of_errors), | |
345 u'', | |
346 u'Use pinfo to inspect errors in more detail.', | |
347 u'']) | |
348 self._output_writer.Write(output_text) | |
349 | |
350 if processing_status.error_path_specs: | |
351 output_text = u'\n'.join([ | |
352 u'', | |
353 u'Path specifications that could not be processed:', | |
354 u'']) | |
355 self._output_writer.Write(output_text) | |
356 for path_spec in processing_status.error_path_specs: | |
357 self._output_writer.Write(path_spec.comparable) | |
358 self._output_writer.Write(u'\n') | |
359 | |
360 self._output_writer.Write(u'\n') | |
361 | |
186 def AddOutputOptions(self, argument_group): | 362 def AddOutputOptions(self, argument_group): |
187 """Adds the output options to the argument group. | 363 """Adds the output options to the argument group. |
188 | 364 |
189 Args: | 365 Args: |
190 argument_group (argparse._ArgumentGroup): argparse argument group. | 366 argument_group (argparse._ArgumentGroup): argparse argument group. |
191 """ | 367 """ |
192 argument_group.add_argument( | 368 argument_group.add_argument( |
193 u'--output', dest=u'output_module', action=u'store', type=str, | 369 u'--output', dest=u'output_module', action=u'store', type=str, |
194 default=u'', help=( | 370 default=u'', help=( |
195 u'Bypass the storage module directly storing events according to ' | 371 u'Bypass the storage module directly storing events according to ' |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
235 dest=u'worker_memory_limit', action=u'store', type=int, | 411 dest=u'worker_memory_limit', action=u'store', type=int, |
236 metavar=u'SIZE', help=( | 412 metavar=u'SIZE', help=( |
237 u'Maximum amount of memory a worker process is allowed to consume. ' | 413 u'Maximum amount of memory a worker process is allowed to consume. ' |
238 u'[defaults to 2 GiB]')) | 414 u'[defaults to 2 GiB]')) |
239 | 415 |
240 argument_group.add_argument( | 416 argument_group.add_argument( |
241 u'--workers', dest=u'workers', action=u'store', type=int, default=0, | 417 u'--workers', dest=u'workers', action=u'store', type=int, default=0, |
242 help=(u'The number of worker processes [defaults to available system ' | 418 help=(u'The number of worker processes [defaults to available system ' |
243 u'CPUs minus one].')) | 419 u'CPUs minus one].')) |
244 | 420 |
245 def ListHashers(self): | |
246 """Lists information about the available hashers.""" | |
247 hashers_information = self._front_end.GetHashersInformation() | |
248 | |
249 table_view = cli_views.ViewsFactory.GetTableView( | |
250 self._views_format_type, column_names=[u'Name', u'Description'], | |
251 title=u'Hashers') | |
252 | |
253 for name, description in sorted(hashers_information): | |
254 table_view.AddRow([name, description]) | |
255 table_view.Write(self._output_writer) | |
256 | |
257 def ListOutputModules(self): | 421 def ListOutputModules(self): |
258 """Lists the output modules.""" | 422 """Lists the output modules.""" |
259 table_view = cli_views.ViewsFactory.GetTableView( | 423 table_view = cli_views.ViewsFactory.GetTableView( |
260 self._views_format_type, column_names=[u'Name', u'Description'], | 424 self._views_format_type, column_names=[u'Name', u'Description'], |
261 title=u'Output Modules') | 425 title=u'Output Modules') |
262 for name, output_class in self._front_end.GetOutputClasses(): | 426 |
427 output_classes = list(output_manager.OutputManager.GetOutputClasses()) | |
428 for name, output_class in output_classes: | |
263 table_view.AddRow([name, output_class.DESCRIPTION]) | 429 table_view.AddRow([name, output_class.DESCRIPTION]) |
264 table_view.Write(self._output_writer) | 430 table_view.Write(self._output_writer) |
265 | 431 |
266 disabled_classes = list(self._front_end.GetDisabledOutputClasses()) | 432 disabled_classes = list( |
433 output_manager.OutputManager.GetDisabledOutputClasses()) | |
267 if not disabled_classes: | 434 if not disabled_classes: |
268 return | 435 return |
269 | 436 |
270 table_view = cli_views.ViewsFactory.GetTableView( | 437 table_view = cli_views.ViewsFactory.GetTableView( |
271 self._views_format_type, column_names=[u'Name', u'Description'], | 438 self._views_format_type, column_names=[u'Name', u'Description'], |
272 title=u'Disabled Output Modules') | 439 title=u'Disabled Output Modules') |
273 for name, output_class in disabled_classes: | 440 for name, output_class in disabled_classes: |
274 table_view.AddRow([name, output_class.DESCRIPTION]) | 441 table_view.AddRow([name, output_class.DESCRIPTION]) |
275 table_view.Write(self._output_writer) | 442 table_view.Write(self._output_writer) |
276 | 443 |
277 def ListParsersAndPlugins(self): | |
278 """Lists information about the available parsers and plugins.""" | |
279 parsers_information = self._front_end.GetParsersInformation() | |
280 | |
281 table_view = cli_views.ViewsFactory.GetTableView( | |
282 self._views_format_type, column_names=[u'Name', u'Description'], | |
283 title=u'Parsers') | |
284 | |
285 for name, description in sorted(parsers_information): | |
286 table_view.AddRow([name, description]) | |
287 table_view.Write(self._output_writer) | |
288 | |
289 for parser_name in self._front_end.GetNamesOfParsersWithPlugins(): | |
290 plugins_information = self._front_end.GetParserPluginsInformation( | |
291 parser_filter_expression=parser_name) | |
292 | |
293 table_title = u'Parser plugins: {0:s}'.format(parser_name) | |
294 table_view = cli_views.ViewsFactory.GetTableView( | |
295 self._views_format_type, column_names=[u'Name', u'Description'], | |
296 title=table_title) | |
297 for name, description in sorted(plugins_information): | |
298 table_view.AddRow([name, description]) | |
299 table_view.Write(self._output_writer) | |
300 | |
301 presets_information = self._front_end.GetParserPresetsInformation() | |
302 | |
303 table_view = cli_views.ViewsFactory.GetTableView( | |
304 self._views_format_type, column_names=[u'Name', u'Parsers and plugins'], | |
305 title=u'Parser presets') | |
306 for name, description in sorted(presets_information): | |
307 table_view.AddRow([name, description]) | |
308 table_view.Write(self._output_writer) | |
309 | |
310 def ParseArguments(self): | 444 def ParseArguments(self): |
311 """Parses the command line arguments. | 445 """Parses the command line arguments. |
312 | 446 |
313 Returns: | 447 Returns: |
314 bool: True if the arguments were successfully parsed. | 448 bool: True if the arguments were successfully parsed. |
315 """ | 449 """ |
316 self._ConfigureLogging() | 450 self._ConfigureLogging() |
317 | 451 |
318 argument_parser = argparse.ArgumentParser( | 452 argument_parser = argparse.ArgumentParser( |
319 description=self.DESCRIPTION, epilog=self.EPILOG, add_help=False, | 453 description=self.DESCRIPTION, epilog=self.EPILOG, add_help=False, |
(...skipping 76 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
396 default=None, type=str, help=( | 530 default=None, type=str, help=( |
397 u'The path to the source device, file or directory. If the source ' | 531 u'The path to the source device, file or directory. If the source ' |
398 u'is a supported storage media device or image file, archive file ' | 532 u'is a supported storage media device or image file, archive file ' |
399 u'or a directory, the files within are processed recursively.')) | 533 u'or a directory, the files within are processed recursively.')) |
400 | 534 |
401 argument_parser.add_argument( | 535 argument_parser.add_argument( |
402 u'filter', action=u'store', metavar=u'FILTER', nargs=u'?', default=None, | 536 u'filter', action=u'store', metavar=u'FILTER', nargs=u'?', default=None, |
403 type=str, help=( | 537 type=str, help=( |
404 u'A filter that can be used to filter the dataset before it ' | 538 u'A filter that can be used to filter the dataset before it ' |
405 u'is written into storage. More information about the filters ' | 539 u'is written into storage. More information about the filters ' |
406 u'and its usage can be found here: http://plaso.kiddaland.' | 540 u'and its usage can be found here: {0:s}').format( |
407 u'net/usage/filters')) | 541 self._FILTERS_URL)) |
408 | 542 |
409 try: | 543 try: |
410 options = argument_parser.parse_args() | 544 options = argument_parser.parse_args() |
411 except UnicodeEncodeError: | 545 except UnicodeEncodeError: |
412 # If we get here we are attempting to print help in a non-Unicode | 546 # If we get here we are attempting to print help in a non-Unicode |
413 # terminal. | 547 # terminal. |
414 self._output_writer.Write(u'\n') | 548 self._output_writer.Write(u'\n') |
415 self._output_writer.Write(argument_parser.format_help()) | 549 self._output_writer.Write(argument_parser.format_help()) |
416 return False | 550 return False |
417 | 551 |
(...skipping 68 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
486 logging_level = logging.WARNING | 620 logging_level = logging.WARNING |
487 else: | 621 else: |
488 logging_level = logging.INFO | 622 logging_level = logging.INFO |
489 | 623 |
490 self.ParseLogFileOptions(options) | 624 self.ParseLogFileOptions(options) |
491 self._ConfigureLogging( | 625 self._ConfigureLogging( |
492 filename=self._log_file, format_string=format_string, | 626 filename=self._log_file, format_string=format_string, |
493 log_level=logging_level) | 627 log_level=logging_level) |
494 | 628 |
495 if self._debug_mode: | 629 if self._debug_mode: |
496 logging_filter = log2timeline.LoggingFilter() | 630 log_filter = logging_filter.LoggingFilter() |
497 root_logger = logging.getLogger() | 631 root_logger = logging.getLogger() |
498 root_logger.addFilter(logging_filter) | 632 root_logger.addFilter(log_filter) |
499 | 633 |
500 self._output = self.ParseStringOption(options, u'output') | 634 self._storage_file_path = self.ParseStringOption(options, u'output') |
501 if not self._output: | 635 if not self._storage_file_path: |
502 raise errors.BadConfigOption(u'No output defined.') | 636 raise errors.BadConfigOption(u'No storage file path defined.') |
503 | 637 |
504 # TODO: where is this defined? | 638 # TODO: where is this defined? |
505 self._operating_system = getattr(options, u'os', None) | 639 self._operating_system = getattr(options, u'os', None) |
506 | 640 |
507 if self._operating_system: | 641 if self._operating_system: |
508 self._mount_path = getattr(options, u'filename', None) | 642 self._mount_path = getattr(options, u'filename', None) |
509 | 643 |
510 self._filter_expression = self.ParseStringOption(options, u'filter') | 644 self._ParseFilterOption(options) |
511 if self._filter_expression: | |
512 # TODO: refactor self._filter_object out the tool into the frontend. | |
513 self._filter_object = self._GetMatcher(self._filter_expression) | |
514 if not self._filter_object: | |
515 raise errors.BadConfigOption( | |
516 u'Invalid filter expression: {0:s}'.format(self._filter_expression)) | |
517 | 645 |
518 self._status_view_mode = getattr(options, u'status_view_mode', u'linear') | 646 self._status_view_mode = getattr(options, u'status_view_mode', u'linear') |
519 self._enable_sigsegv_handler = getattr(options, u'sigsegv_handler', False) | 647 self._enable_sigsegv_handler = getattr(options, u'sigsegv_handler', False) |
520 | 648 |
521 def ProcessSources(self): | 649 def _PreprocessSources(self, engine): |
522 """Processes the sources. | 650 """Preprocesses the sources. |
651 | |
652 Args: | |
653 engine (BaseEngine): engine to preprocess the sources. | |
654 """ | |
655 logging.debug(u'Starting preprocessing.') | |
656 | |
657 try: | |
658 engine.PreprocessSources( | |
659 self._source_path_specs, resolver_context=self._resolver_context) | |
660 | |
661 except IOError as exception: | |
662 logging.error(u'Unable to preprocess with error: {0:s}'.format(exception)) | |
663 | |
664 logging.debug(u'Preprocessing done.') | |
665 | |
  def ExtractEventsFromSources(self):
    """Processes the sources and extracts events.

    Raises:
      BadConfigOption: if the storage file path is invalid.
      SourceScannerError: if the source scanner could not find a supported
          file system.
      UserAbort: if the user initiated an abort.
    """
    # Validate the storage destination before doing any expensive work.
    self._CheckStorageFile(self._storage_file_path)

    self._DetermineSourceType()

    self._output_writer.Write(u'\n')
    self._PrintStatusHeader()

    self._output_writer.Write(u'Processing started.\n')

    single_process_mode = self._single_process_mode
    if self._source_type == dfvfs_definitions.SOURCE_TYPE_FILE:
      # No need to multi process a single file source.
      single_process_mode = True

    # Select the extraction engine that matches the processing mode.
    if single_process_mode:
      engine = single_process_engine.SingleProcessEngine()
    else:
      engine = multi_process_engine.TaskMultiProcessEngine(
          use_zeromq=self._use_zeromq)

    # The session records the tool invocation context so it can be stored
    # alongside the extracted events.
    session = engine.CreateSession(
        command_line_arguments=self._command_line_arguments,
        debug_mode=self._debug_mode,
        filter_expression=self._filter_expression,
        filter_file=self._filter_file,
        preferred_encoding=self.preferred_encoding,
        preferred_time_zone=self._preferred_time_zone,
        preferred_year=self._preferred_year)

    storage_writer = storage_zip_file.ZIPStorageFileWriter(
        session, self._storage_file_path)

    # If the source is a directory or a storage media image
    # run pre-processing.
    if (self._force_preprocessing or
        self._source_type in self._SOURCE_TYPES_TO_PREPROCESS):
      self._PreprocessSources(engine)

    configuration = self._CreateProcessingConfiguration()

    if not configuration.parser_filter_expression:
      # No parser filter was specified on the command line; try to derive a
      # parser preset from the operating system information collected by
      # pre-processing into the knowledge base.
      operating_system = engine.knowledge_base.GetValue(
          u'operating_system')
      operating_system_product = engine.knowledge_base.GetValue(
          u'operating_system_product')
      operating_system_version = engine.knowledge_base.GetValue(
          u'operating_system_version')
      parser_filter_expression = (
          self._parsers_manager.GetPresetForOperatingSystem(
              operating_system, operating_system_product,
              operating_system_version))

      if parser_filter_expression:
        logging.info(u'Parser filter expression changed to: {0:s}'.format(
            parser_filter_expression))

      configuration.parser_filter_expression = parser_filter_expression

      # NOTE(review): the following assignments are read as part of this
      # branch; confirm they should not also run when a parser filter
      # expression was explicitly configured.
      names_generator = self._parsers_manager.GetParserAndPluginNames(
          parser_filter_expression=parser_filter_expression)

      session.enabled_parser_names = list(names_generator)
      session.parser_filter_expression = parser_filter_expression

    if session.preferred_time_zone:
      try:
        engine.knowledge_base.SetTimeZone(session.preferred_time_zone)
      except ValueError:
        # Fall back to the knowledge base default rather than aborting on an
        # unrecognized time zone name.
        logging.warning(
            u'Unsupported time zone: {0:s}, defaulting to {1:s}'.format(
                session.preferred_time_zone,
                engine.knowledge_base.time_zone.zone))

    # A filter file restricts collection; expand it into dfVFS find
    # specifications, substituting environment variables gathered during
    # pre-processing.
    filter_find_specs = None
    if configuration.filter_file:
      environment_variables = engine.knowledge_base.GetEnvironmentVariables()
      filter_find_specs = frontend_utils.BuildFindSpecsFromFile(
          configuration.filter_file,
          environment_variables=environment_variables)

    status_update_callback = self._GetStatusUpdateCallback()

    # The two engines expose ProcessSources with different signatures: the
    # single process engine needs the resolver context, the task-based multi
    # process engine needs the session identifier and worker settings.
    processing_status = None
    if single_process_mode:
      logging.debug(u'Starting extraction in single process mode.')

      processing_status = engine.ProcessSources(
          self._source_path_specs, storage_writer, self._resolver_context,
          configuration, filter_find_specs=filter_find_specs,
          status_update_callback=status_update_callback)

    else:
      logging.debug(u'Starting extraction in multi process mode.')

      processing_status = engine.ProcessSources(
          session.identifier, self._source_path_specs, storage_writer,
          configuration, enable_sigsegv_handler=self._enable_sigsegv_handler,
          filter_find_specs=filter_find_specs,
          number_of_worker_processes=self._number_of_extraction_workers,
          status_update_callback=status_update_callback,
          worker_memory_limit=self._worker_memory_limit)

    self._PrintProcessingSummary(processing_status)
611 | 777 |
612 def ShowInfo(self): | 778 def ShowInfo(self): |
613 """Shows information about available hashers, parsers, plugins, etc.""" | 779 """Shows information about available hashers, parsers, plugins, etc.""" |
614 self._output_writer.Write( | 780 self._output_writer.Write( |
615 u'{0:=^80s}\n'.format(u' log2timeline/plaso information ')) | 781 u'{0:=^80s}\n'.format(u' log2timeline/plaso information ')) |
616 | 782 |
617 plugin_list = self._front_end.GetPluginData() | 783 plugin_list = self._GetPluginData() |
618 for header, data in plugin_list.items(): | 784 for header, data in plugin_list.items(): |
619 table_view = cli_views.ViewsFactory.GetTableView( | 785 table_view = cli_views.ViewsFactory.GetTableView( |
620 self._views_format_type, column_names=[u'Name', u'Description'], | 786 self._views_format_type, column_names=[u'Name', u'Description'], |
621 title=header) | 787 title=header) |
622 for entry_header, entry_data in sorted(data): | 788 for entry_header, entry_data in sorted(data): |
623 table_view.AddRow([entry_header, entry_data]) | 789 table_view.AddRow([entry_header, entry_data]) |
624 table_view.Write(self._output_writer) | 790 table_view.Write(self._output_writer) |
625 | |
626 | |
def Main():
  """The main function.

  Returns:
    bool: True if the tool ran successfully, False otherwise.
  """
  # Required for frozen (e.g. PyInstaller) builds on Windows.
  multiprocessing.freeze_support()

  tool = Log2TimelineTool()

  if not tool.ParseArguments():
    return False

  if tool.show_info:
    tool.ShowInfo()
    return True

  # Dispatch every requested --list-* option; any of them short-circuits
  # normal processing.
  list_requests = (
      (tool.list_hashers, tool.ListHashers),
      (tool.list_parsers_and_plugins, tool.ListParsersAndPlugins),
      (tool.list_output_modules, tool.ListOutputModules),
      (tool.list_profilers, tool.ListProfilers),
      (tool.list_timezones, tool.ListTimeZones))

  have_list_option = False
  for is_requested, list_callback in list_requests:
    if is_requested:
      list_callback()
      have_list_option = True

  if have_list_option:
    return True

  if tool.dependencies_check and not dependencies.CheckDependencies(
      verbose_output=False):
    return False

  try:
    tool.ProcessSources()

  except (KeyboardInterrupt, errors.UserAbort):
    logging.warning(u'Aborted by user.')
    return False

  except (errors.BadConfigOption, errors.SourceScannerError) as exception:
    logging.warning(exception)
    return False

  return True
680 | |
681 | |
if __name__ == '__main__':
  # Exit status 0 on success, 1 on failure.
  sys.exit(0 if Main() else 1)
OLD | NEW |