16 changes: 10 additions & 6 deletions can/io/blf.py
@@ -11,7 +11,8 @@
of uncompressed data each. This data contains the actual CAN messages and other
object types.
"""

import gzip
import os
import struct
import zlib
import datetime
@@ -432,7 +433,8 @@ def __init__(
self.start_timestamp = None
self.stop_timestamp = None
# Write a default header which will be updated when stopped
self._write_header(FILE_HEADER_SIZE)
# Temporarily removed for the sake of GzipFiles
# self._write_header(FILE_HEADER_SIZE)

def _write_header(self, filesize):
header = [b"LOGG", FILE_HEADER_SIZE, self.application_id, 0, 0, 0, 2, 6, 8, 1]
@@ -540,8 +542,9 @@ def _add_object(self, obj_type, data, timestamp=None):

self._buffer_size += obj_size + padding_size
self.object_count += 1
if self._buffer_size >= self.max_container_size:
self._flush()
# Don't accidentally write to blf file prior to writing header
# if self._buffer_size >= self.max_container_size:
# self._flush()

def _flush(self):
"""Compresses and writes data in the buffer to file."""
@@ -582,12 +585,13 @@ def file_size(self) -> int:

def stop(self):
"""Stops logging and closes the file."""
self._flush()
if self.file.seekable():
# For a GzipFile, the file currently must be empty prior to writing
# this final header, else an OSError will occur
filesize = self.file.tell()
# Write header in the beginning of the file
self.file.seek(0)
self._write_header(filesize)
else:
LOG.error("Could not write BLF header since file is not seekable")
self._flush()
super().stop()
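
A minimal sketch, assuming only CPython's gzip module, of the constraint behind the reordering above: a GzipFile opened for writing still reports seekable() as True, but rewinding it after data has been written raises OSError, so the BLF header can only land at offset 0 while the stream is still empty. That is why stop() now writes the header before flushing the buffered containers. The filename below is illustrative.

import gzip

with gzip.open("demo.blf.gz", "wb") as f:
    assert f.seekable()      # True even though the stream is write-only
    f.seek(0)                # allowed: nothing has been written yet
    f.write(b"LOGG")         # stand-in for the real BLF file header
    try:
        f.seek(0)            # rewinding after a write is refused
    except OSError as exc:
        print("cannot rewind a gzip write stream:", exc)
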
17 changes: 12 additions & 5 deletions can/io/logger.py
@@ -88,7 +88,7 @@ def __new__( # type: ignore

file_or_filename: AcceptedIOType = filename
if suffix == ".gz":
suffix, file_or_filename = Logger.compress(filename)
suffix, file_or_filename = Logger.compress(filename, *args, **kwargs)

try:
return Logger.message_writers[suffix](file_or_filename, *args, **kwargs)
@@ -98,13 +98,18 @@
) from None

@staticmethod
def compress(filename: StringPathLike) -> Tuple[str, FileLike]:
def compress(
filename: StringPathLike, *args: Any, **kwargs: Any
) -> Tuple[str, FileLike]:
"""
Return the suffix and io object of the decompressed file.
File will automatically recompress upon close.
"""
real_suffix = pathlib.Path(filename).suffixes[-2].lower()
mode = "ab" if real_suffix == ".blf" else "at"
if kwargs.get("append", False):
mode = "ab" if real_suffix == ".blf" else "at"
else:
mode = "wb" if real_suffix == ".blf" else "wt"

return real_suffix, gzip.open(filename, mode)
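
As a hedged usage sketch (not part of the diff): with this mode selection, a ".gz" target is truncated by default and is only opened for appending when the caller passes append=True to a writer that supports it. The filename and message below are made up.

import can

# Default case: "out.asc.gz" resolves to the ASC writer and the gzip
# stream is opened with mode "wt", overwriting any previous trace.
with can.Logger("out.asc.gz") as log:
    log(can.Message(arbitration_id=0x1A0, data=[0x01, 0x02]))
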

@@ -344,12 +349,14 @@ def do_rollover(self) -> None:
def _default_name(self) -> StringPathLike:
"""Generate the default rotation filename."""
path = pathlib.Path(self.base_filename)
stem = path.parts[-1].split(".")[0]
suffix = "".join(pathlib.Path(self.base_filename).suffixes[-2:])
new_name = (
path.stem
stem
+ "_"
+ datetime.now().strftime("%Y-%m-%dT%H%M%S")
+ "_"
+ f"#{self.rollover_count:03}"
+ path.suffix
+ suffix
)
return str(path.parent / new_name)
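
A small illustration of the suffix handling the revised _default_name relies on, using a made-up path: with a compound name like "trace.blf.gz", path.stem leaves the inner ".blf" stranded in the middle of the rotated name and path.suffix keeps only ".gz", whereas the new code strips the stem completely and re-appends the last two suffixes.

import pathlib

p = pathlib.Path("trace.blf.gz")
print(p.suffix)                    # ".gz"      -> what path.suffix alone keeps
print("".join(p.suffixes[-2:]))    # ".blf.gz"  -> the compound suffix the diff preserves
print(p.parts[-1].split(".")[0])   # "trace"    -> stem with every suffix stripped
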