Skip to content

Commit

Permalink
Code review: 243260043: Changes for negative seeks #218
Browse files Browse the repository at this point in the history
  • Loading branch information
joachimmetz committed Jun 12, 2015
1 parent 8b9be8e commit 2e78e48
Show file tree
Hide file tree
Showing 3 changed files with 37 additions and 11 deletions.
2 changes: 1 addition & 1 deletion config/dpkg/changelog
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@ python-plaso (1.3.0-1) unstable; urgency=low

* Auto-generated

- -- Log2Timeline <[email protected]>  Fri, 12 Jun 2015 16:06:21 +0200
+ -- Log2Timeline <[email protected]>  Fri, 12 Jun 2015 16:09:01 +0200
42 changes: 32 additions & 10 deletions plaso/engine/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ class BaseEventExtractionWorker(queue.ItemQueueConsumer):
are pushed on a storage queue for further processing.
"""

-  DEFAULT_HASH_READ_SIZE = 4096
+  _DEFAULT_HASH_READ_SIZE = 4096

def __init__(
self, identifier, path_spec_queue, event_queue_producer,
Expand Down Expand Up @@ -143,7 +143,7 @@ def _HashFileEntry(self, file_entry):
if not file_entry.IsFile() or not self._hasher_names:
return

-    logging.debug(u'[ProcessFileEntry] hashing file: {0:s}'.format(
+    logging.debug(u'[HashFileEntry] hashing file: {0:s}'.format(
self._current_display_name))

hasher_objects = hashers_manager.HashersManager.GetHasherObjects(
Expand All @@ -154,11 +154,11 @@ def _HashFileEntry(self, file_entry):
file_object.seek(0, os.SEEK_SET)

# We only do one read, then pass it to each of the hashers in turn.
-      data = file_object.read(self.DEFAULT_HASH_READ_SIZE)
+      data = file_object.read(self._DEFAULT_HASH_READ_SIZE)
while data:
for hasher in hasher_objects:
hasher.Update(data)
-        data = file_object.read(self.DEFAULT_HASH_READ_SIZE)
+        data = file_object.read(self._DEFAULT_HASH_READ_SIZE)

finally:
file_object.close()
Expand Down Expand Up @@ -199,9 +199,17 @@ def _ParseFileEntryWithParser(self, parser_object, file_entry):
try:
parser_object.UpdateChainAndParse(self._parser_mediator)

+      # We catch the IOError so we can determine the parser that generated
+      # the error.
+      except IOError as exception:
+        logging.warning(
+            u'{0:s} unable to parse file: {1:s} with error: {2:s}'.format(
+                parser_object.NAME, self._current_display_name, exception))

except errors.UnableToParseFile as exception:
-        logging.debug(u'Not a {0:s} file ({1:s}) - {2:s}'.format(
-            parser_object.NAME, file_entry.name, exception))
+        logging.debug(
+            u'{0:s} unable to parse file: {1:s} with error: {2:s}'.format(
+                parser_object.NAME, self._current_display_name, exception))

finally:
if self._parsers_profiler:
Expand All @@ -222,8 +230,15 @@ def _ProcessArchiveFile(self, file_entry):
Returns:
A boolean indicating if the file is an archive file.
"""
-    type_indicators = analyzer.Analyzer.GetArchiveTypeIndicators(
-        file_entry.path_spec, resolver_context=self._resolver_context)
+    try:
+      type_indicators = analyzer.Analyzer.GetArchiveTypeIndicators(
+          file_entry.path_spec, resolver_context=self._resolver_context)
+    except IOError as exception:
+      logging.warning((
+          u'Analyzer failed to determine archive type indicators '
+          u'for file: {0:s} with error: {1:s}').format(
+              self._current_display_name, exception))
+      return False

number_of_type_indicators = len(type_indicators)
if number_of_type_indicators == 0:
Expand Down Expand Up @@ -288,8 +303,15 @@ def _ProcessCompressedStreamFile(self, file_entry):
Returns:
A boolean indicating if the file is a compressed stream file.
"""
-    type_indicators = analyzer.Analyzer.GetCompressedStreamTypeIndicators(
-        file_entry.path_spec, resolver_context=self._resolver_context)
+    try:
+      type_indicators = analyzer.Analyzer.GetCompressedStreamTypeIndicators(
+          file_entry.path_spec, resolver_context=self._resolver_context)
+    except IOError as exception:
+      logging.warning((
+          u'Analyzer failed to determine compressed stream type indicators '
+          u'for file: {0:s} with error: {1:s}').format(
+              self._current_display_name, exception))
+      return False

number_of_type_indicators = len(type_indicators)
if number_of_type_indicators == 0:
Expand Down
4 changes: 4 additions & 0 deletions plaso/parsers/firefox_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,6 +188,10 @@ def ParseFileObject(self, parser_mediator, file_object, **kwargs):
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
+    # TODO: determine if the minimum file size is really 4 bytes.
+    if file_object.get_size() < 4:
+      raise errors.UnableToParseFile(u'Not a Firefox cache2 file.')

file_entry = parser_mediator.GetFileEntry()

try:
Expand Down

0 comments on commit 2e78e48

Please sign in to comment.