Merge pull request #67 from Preston-Landers/bugfix/issue_66_timed_rotation

Fixes Issue #66: ConcurrentTimedRotatingFileHandler rotating gzip fil…
Preston-Landers authored Nov 28, 2023
2 parents 6fb63e6 + 53755c4 commit 8951164
Showing 5 changed files with 128 additions and 21 deletions.
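For context, the bug being fixed concerns ConcurrentTimedRotatingFileHandler's cleanup of old gzipped backups. Below is a minimal sketch of the kind of configuration affected, assuming the handler accepts TimedRotatingFileHandler-style keyword arguments plus this package's use_gzip option; the when/interval/backupCount values are illustrative, not taken from the commit:

import logging

from concurrent_log_handler import ConcurrentTimedRotatingFileHandler

logger = logging.getLogger("gzip_rotation_demo")
logger.setLevel(logging.INFO)

# Rotate every 3 seconds, gzip each rotated file, keep only 3 backups --
# the combination exercised by issue #66.
handler = ConcurrentTimedRotatingFileHandler(
    filename="app.log", when="S", interval=3, backupCount=3, use_gzip=True
)
logger.addHandler(handler)

for i in range(10_000):
    logger.info("message %d", i)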
45 changes: 45 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,45 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Current File",
            "type": "python",
            "request": "launch",
            "program": "${file}",
            "console": "integratedTerminal",
            "justMyCode": true
        },
        {
            "name": "CLH PyTests",
            "type": "python",
            "request": "launch",
            "module": "pytest",
            "console": "integratedTerminal",
            "cwd": "${workspaceFolder}/tests",
            "args": [],
            "justMyCode": false
        },
        {
            "name": "Concurrent Log Handler basic example",
            "type": "python",
            "request": "launch",
            "program": "${workspaceFolder}/tests/other/test.py",
            "console": "integratedTerminal",
            "cwd": "${workspaceFolder}/tests/other",
            "justMyCode": true
        },
        {
            "name": "CLH old stress test",
            "type": "python",
            "request": "launch",
            "program": "${workspaceFolder}/tests/other/stresstest.py",
            "console": "integratedTerminal",
            "cwd": "${workspaceFolder}/tests/other",
            "args": ["--log-calls", "2000"],
            "justMyCode": true
        },
    ]
}
6 changes: 4 additions & 2 deletions src/concurrent_log_handler/__init__.py
@@ -836,8 +836,10 @@ def doRollover(self):  # noqa: C901, PLR0912
             self.clh.do_gzip(dfn)

         if self.backupCount > 0:
-            for s in self.getFilesToDelete():
-                os.remove(s + gzip_ext)
+            # File will already have gzip extension here if applicable
+            # Thanks to @moynihan
+            for file in self.getFilesToDelete():
+                os.remove(file)
         # if not self.delay:
         #     self.stream = self._open()
         newRolloverAt = self.computeRollover(currentTime)
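The two replaced lines above are the heart of the fix: when gzip compression is enabled, getFilesToDelete() already returns backup names ending in the gzip extension, so appending gzip_ext a second time pointed os.remove() at paths that do not exist. A hypothetical illustration (the backup file name is invented):

# Hypothetical backup name returned by getFilesToDelete() with gzip on;
# note it already carries the ".gz" suffix:
backup = "app.log.2023-11-28_12-00-00.gz"
gzip_ext = ".gz"

old_target = backup + gzip_ext  # "app.log.2023-11-28_12-00-00.gz.gz" -- wrong
new_target = backup             # the file that actually exists on disk
print(old_target, new_target)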
7 changes: 4 additions & 3 deletions tests/other/stresstest.py
@@ -28,11 +28,12 @@
 from time import sleep

 # local lib; for testing
-from concurrent_log_handler import PY2, ConcurrentRotatingFileHandler, randbits
+from concurrent_log_handler import ConcurrentRotatingFileHandler, randbits

 __version__ = "$Id$"
 __author__ = "Lowell Alleman"

+PY2 = False  # No longer supporting Python 2.7

 # ruff: noqa: F821, E501

 # The total amount of rotated files to keep through the test run. Any data accumulated
@@ -252,7 +253,7 @@ def rand_string(str_len):

 parser = OptionParser(
     usage="usage: %prog",
-    version=__version__,
+    # version=__version__,
     description="Stress test the concurrent_log_handler module.",
 )
 parser.add_option(
51 changes: 38 additions & 13 deletions tests/stresstest.py
@@ -169,7 +169,7 @@ def worker_process(test_opts: TestOptions, process_id: int, rollover_counter):
     rollover_counter.increment(file_handler.num_rollovers)


-def validate_log_file(test_opts: TestOptions, run_time: float) -> bool:
+def validate_log_file(test_opts: TestOptions, run_time: float, expect_all=True) -> bool:
     process_tracker = {i: {} for i in range(test_opts.num_processes)}

     # Sort log files, starting with the most recent backup
# Sort log files, starting with the most recent backup
Expand Down Expand Up @@ -210,13 +210,14 @@ def validate_log_file(test_opts: TestOptions, run_time: float) -> bool:
process_tracker[process_id][message_id] = msg_state

log_calls = test_opts.log_calls
for process_id, message_ids in process_tracker.items():
if len(message_ids) != log_calls:
print(
f"Error: Missing messages from Process-{process_id}: "
f"len(message_ids) {len(message_ids)} != log_calls {log_calls}"
)
return False
if expect_all:
for process_id, message_ids in process_tracker.items():
if len(message_ids) != log_calls:
print(
f"Error: Missing messages from Process-{process_id}: "
f"len(message_ids) {len(message_ids)} != log_calls {log_calls}"
)
return False
print(
f"{run_time:.2f} seconds to read {chars_read} chars "
f"from {len(all_log_files)} files ({chars_read / run_time:.2f} chars/sec)"
@@ -248,6 +249,33 @@ def run_stress_test(test_opts: TestOptions) -> int:

     end_time = time.time()

+    print(
+        f"All processes finished. (Rollovers: "
+        f"{rollover_counter.get_value()} - min was {test_opts.min_rollovers})"
+    )
+
+    # If backupCount is less than 10, assume we want specifically to
+    # test backupCount. This means, in most cases, we will have deleted
+    # some log files and we should not expect to find all log files.
+    # Therefore we can't do the "expect_all" check where it looks for any
+    # missing lines. It would be nice to be able to combine these somehow.
+    MAGIC_BACKUP_COUNT = 10
+    expect_all_lines = True
+    backup_count = test_opts.log_opts.get("backupCount", 0)
+    if backup_count > 0 and backup_count < MAGIC_BACKUP_COUNT:
+        expect_all_lines = False
+        log_path = os.path.join(test_opts.log_dir, test_opts.log_file)
+        log_files = glob.glob(f"{log_path}*")
+        # Check that backupCount was not exceeded.
+        # The +1 is because we're counting 'backups' plus the main log file.
+        if len(log_files) != backup_count + 1:
+            print(
+                f"Error: {len(log_files)} log files were created but "
+                f"we expected {backup_count + 1}. Could indicate a failure "
+                f"to rotate properly or to delete excessive backups (`backupCount`)."
+            )
+            return 1
+
     # Each test should trigger some minimum number of rollovers.
     if (
         test_opts.min_rollovers
@@ -258,15 +286,12 @@ def run_stress_test(test_opts: TestOptions) -> int:
             f"we expected at least {test_opts.min_rollovers}."
         )
         return 1
-    print(
-        f"All processes finished. (Rollovers: "
-        f"{rollover_counter.get_value()} - min was {test_opts.min_rollovers})"
-    )

     # Check for any omissions or duplications.
-    if validate_log_file(test_opts, end_time - start_time):
+    if validate_log_file(test_opts, end_time - start_time, expect_all=expect_all_lines):
         print("Stress test passed.")
         return 0

     print("Stress test failed.")
     return 1
40 changes: 37 additions & 3 deletions tests/test_stresstest.py
@@ -3,13 +3,24 @@

 """
 Pytest based unit test cases to drive stresstest.py.
+See comments about backupCount in stresstest.py. In short,
+if backupCount is set here to less than 10, we assume that
+some logs are deleted before the end of the test and therefore
+don't test specifically for missing lines/items.
 """

 import pytest
 from stresstest import TestOptions, run_stress_test

 TEST_CASES = {
     "default test options": TestOptions(),
+    "backupCount=3": TestOptions(
+        log_opts=TestOptions.default_log_opts({"backupCount": 3}),
+    ),
+    "backupCount=3, use_gzip=True": TestOptions(
+        log_opts=TestOptions.default_log_opts({"backupCount": 3, "use_gzip": True}),
+    ),
     "num_processes=2, log_calls=6_000": TestOptions(
         num_processes=2, log_calls=6_000, min_rollovers=80
     ),
@@ -65,6 +76,32 @@
             }
         ),
     ),
+    "backupCount=3, use_gzip=True, use_timed=True, interval=3, log_calls=3_000, num_processes=4": TestOptions(
+        use_timed=True,
+        num_processes=4,
+        log_calls=3_000,
+        min_rollovers=4,
+        log_opts=TestOptions.default_timed_log_opts(
+            {
+                "backupCount": 3,
+                "interval": 3,
+                "use_gzip": True,
+            }
+        ),
+    ),
+    "backupCount=4, use_timed=True, interval=4, log_calls=3_000, num_processes=5": TestOptions(
+        use_timed=True,
+        num_processes=5,
+        log_calls=3_000,
+        min_rollovers=3,
+        log_opts=TestOptions.default_timed_log_opts(
+            {
+                "backupCount": 4,
+                "interval": 4,
+                "use_gzip": False,
+            }
+        ),
+    ),
     "use_timed=True, num_processes=15, interval=1, log_calls=5_000, use_gzip=True": TestOptions(
         use_timed=True,
         log_calls=5_000,
@@ -102,9 +139,6 @@
             }
         ),
     ),
-    # TODO: it would be good to have some test cases that verify that backupCount is not exceeded.
-    # Testing time intervals other than seconds is difficult because the tests would
-    # take hours unless we find a way to mock things.
 }
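These named option sets are presumably consumed by a parametrized pytest driver elsewhere in tests/test_stresstest.py (not shown in this diff). A minimal sketch of such a driver, assuming only the names visible above and that run_stress_test() returns 0 on success:

import pytest

# Sketch only: the real driver in this file is outside the diff context.
@pytest.mark.parametrize("label", list(TEST_CASES), ids=list(TEST_CASES))
def test_run_stress_test(label):
    assert run_stress_test(TEST_CASES[label]) == 0, f"failed: {label}"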


