Commit 8f55b649acf9109e3b9abf28738c8e6ab0ee6c40
1 parent
f47d572f
msodde: reconcile json with logging
Log output is not json-compatible, so warn if -ldebug -j is given to msodde. (Could create a logger formatter that always creates json-compatible output, but that is probably overkill right now.) Replace old commented debug-prints with log.debug() or remove them.
Showing
1 changed file
with
14 additions
and
22 deletions
oletools/msodde.py
| ... | ... | @@ -185,10 +185,10 @@ def process_args(cmd_line_args=None): |
| 185 | 185 | parser.add_argument("filepath", help="path of the file to be analyzed", |
| 186 | 186 | type=existing_file, metavar='FILE') |
| 187 | 187 | parser.add_argument("--json", '-j', action='store_true', |
| 188 | - help="Output in json format") | |
| 188 | + help="Output in json format. Do not use with -ldebug") | |
| 189 | 189 | parser.add_argument("--nounquote", help="don't unquote values",action='store_true') |
| 190 | 190 | parser.add_argument('-l', '--loglevel', dest="loglevel", action="store", default=DEFAULT_LOG_LEVEL, |
| 191 | - help="logging level debug/info/warning/error/critical (default=warning)") | |
| 191 | + help="logging level debug/info/warning/error/critical (default=%(default)s)") | |
| 192 | 192 | |
| 193 | 193 | return parser.parse_args(cmd_line_args) |
| 194 | 194 | |
| ... | ... | @@ -212,10 +212,10 @@ def process_ole_field(data): |
| 212 | 212 | """ check if field instructions start with DDE |
| 213 | 213 | |
| 214 | 214 | expects unicode input, returns unicode output (empty if not dde) """ |
| 215 | - #print('processing field {0}'.format(data)) | |
| 215 | + #log.debug('processing field {0}'.format(data)) | |
| 216 | 216 | |
| 217 | 217 | if data.lstrip().lower().startswith(u'dde'): |
| 218 | - #print('--> is DDE!') | |
| 218 | + #log.debug('--> is DDE!') | |
| 219 | 219 | return data |
| 220 | 220 | else: |
| 221 | 221 | return u'' |
| ... | ... | @@ -248,10 +248,6 @@ def process_ole_stream(stream): |
| 248 | 248 | char = ord(char) |
| 249 | 249 | |
| 250 | 250 | if char == OLE_FIELD_START: |
| 251 | - #print('DEBUG: have start at {}'.format(idx)) | |
| 252 | - #if have_start: | |
| 253 | - # print("DEBUG: dismissing previous contents of length {}" | |
| 254 | - # .format(len(field_contents))) | |
| 255 | 251 | have_start = True |
| 256 | 252 | have_sep = False |
| 257 | 253 | max_size_exceeded = False |
| ... | ... | @@ -262,11 +258,8 @@ def process_ole_stream(stream): |
| 262 | 258 | |
| 263 | 259 | # now we are after start char but not at end yet |
| 264 | 260 | if char == OLE_FIELD_SEP: |
| 265 | - #print('DEBUG: have sep at {}'.format(idx)) | |
| 266 | 261 | have_sep = True |
| 267 | 262 | elif char == OLE_FIELD_END: |
| 268 | - #print('DEBUG: have end at {}'.format(idx)) | |
| 269 | - | |
| 270 | 263 | # have complete field now, process it |
| 271 | 264 | result_parts.append(process_ole_field(field_contents)) |
| 272 | 265 | |
| ... | ... | @@ -279,20 +272,18 @@ def process_ole_stream(stream): |
| 279 | 272 | if max_size_exceeded: |
| 280 | 273 | pass |
| 281 | 274 | elif len(field_contents) > OLE_FIELD_MAX_SIZE: |
| 282 | - #print('DEBUG: exceeded max size') | |
| 275 | + log.debug('field exceeds max size of {0}. Ignore rest' | |
| 276 | + .format(OLE_FIELD_MAX_SIZE)) | |
| 283 | 277 | max_size_exceeded = True |
| 284 | 278 | |
| 285 | 279 | # appending a raw byte to a unicode string here. Not clean but |
| 286 | 280 | # all we do later is check for the ascii-sequence 'DDE' later... |
| 287 | 281 | elif char < 128: |
| 288 | 282 | field_contents += unichr(char) |
| 289 | - #print('DEBUG: at idx {:4d}: add byte {} ({})' | |
| 290 | - # .format(idx, unichr(char), char)) | |
| 291 | 283 | else: |
| 292 | 284 | field_contents += u'?' |
| 293 | - #print('DEBUG: at idx {:4d}: add byte ? ({})' | |
| 294 | - # .format(idx, char)) | |
| 295 | - #print('\nstream len = {}'.format(idx)) | |
| 285 | + log.debug('Checked {0} characters, found {1} fields' | |
| 286 | + .format(idx, len(result_parts))) | |
| 296 | 287 | |
| 297 | 288 | # copy behaviour of process_xml: Just concatenate unicode strings |
| 298 | 289 | return u''.join(result_parts) |
| ... | ... | @@ -308,7 +299,7 @@ def process_ole_storage(ole): |
| 308 | 299 | links = '' |
| 309 | 300 | try: |
| 310 | 301 | stream = ole.openstream(st) |
| 311 | - #print('Checking stream {0}'.format(st)) | |
| 302 | + log.debug('Checking stream {0}'.format(st)) | |
| 312 | 303 | links = process_ole_stream(stream) |
| 313 | 304 | except: |
| 314 | 305 | raise |
| ... | ... | @@ -318,13 +309,13 @@ def process_ole_storage(ole): |
| 318 | 309 | if links: |
| 319 | 310 | results.append(links) |
| 320 | 311 | elif st_type == olefile.STGTY_STORAGE: # a storage |
| 321 | - #print('Checking storage {0}'.format(st)) | |
| 312 | + log.debug('Checking storage {0}'.format(st)) | |
| 322 | 313 | links = process_ole_storage(st) |
| 323 | 314 | if links: |
| 324 | 315 | results.extend(links) |
| 325 | 316 | else: |
| 326 | - #print('Warning: unexpected type {0} for entry {1}. Ignore it' | |
| 327 | - # .format(st_type, st)) | |
| 317 | + log.info('unexpected type {0} for entry {1}. Ignore it' | |
| 318 | + .format(st_type, st)) | |
| 328 | 319 | continue |
| 329 | 320 | return results |
| 330 | 321 | |
| ... | ... | @@ -338,7 +329,6 @@ def process_ole(filepath): |
| 338 | 329 | word (possibly after some whitespace) |
| 339 | 330 | """ |
| 340 | 331 | log.debug('process_ole') |
| 341 | - #print('Looks like ole') | |
| 342 | 332 | ole = olefile.OleFileIO(filepath, path_encoding=None) |
| 343 | 333 | text_parts = process_ole_storage(ole) |
| 344 | 334 | return u'\n'.join(text_parts) |
| ... | ... | @@ -452,6 +442,8 @@ def main(cmd_line_args=None): |
| 452 | 442 | # enable logging in the modules: |
| 453 | 443 | log.setLevel(logging.NOTSET) |
| 454 | 444 | |
| 445 | + if args.json and args.loglevel.lower() == 'debug': | |
| 446 | + log.warning('Debug log output will not be json-compatible!') | |
| 455 | 447 | |
| 456 | 448 | if args.nounquote : |
| 457 | 449 | global NO_QUOTES | ... | ... |