From 8238efc7e2903388b3cdf30949e237be93b8c7c4 Mon Sep 17 00:00:00 2001 From: Tim Gates Date: Mon, 28 Dec 2020 22:52:33 +1100 Subject: [PATCH 01/70] docs: fix simple typo, theshold -> threshold There is a small typo in logbook/handlers.py. Should read `threshold` rather than `theshold`. --- logbook/handlers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/logbook/handlers.py b/logbook/handlers.py index 9c28a83..bf01b00 100644 --- a/logbook/handlers.py +++ b/logbook/handlers.py @@ -1767,7 +1767,7 @@ def application(environ, start_response): .. versionchanged:: 0.3 The default behaviour is to buffer up records and then invoke another - handler when a severity theshold was reached with the buffer emitting. + handler when a severity threshold was reached with the buffer emitting. This now enables this logger to be properly used with the :class:`~logbook.MailHandler`. You will now only get one mail for each buffered record. However once the threshold was reached you would From 86a84fd999b703a3aee9b3557f29fb75a9c86546 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Fri, 9 Jun 2023 16:07:15 +0200 Subject: [PATCH 02/70] Use pyproject.toml based build I've changed the Cython build so that it's no longer allowed to fail. Most platforms will have wheels anyway. I've also added metadata to make the next release 3.7+ only, without changing anything else yet. 
--- MANIFEST.in | 2 +- logbook/_speedups.pyx | 2 - pyproject.toml | 41 +++++++ setup.cfg | 15 +-- setup.py | 245 ++++-------------------------------------- 5 files changed, 69 insertions(+), 236 deletions(-) create mode 100644 pyproject.toml diff --git a/MANIFEST.in b/MANIFEST.in index 85dd8a3..9480321 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,3 @@ -include MANIFEST.in Makefile CHANGES logbook/_speedups.c logbook/_speedups.pyx tox.ini LICENSE +include MANIFEST.in Makefile CHANGES logbook/_speedups.pyx tox.ini LICENSE include scripts/test_setup.py recursive-include tests * diff --git a/logbook/_speedups.pyx b/logbook/_speedups.pyx index 8e1d216..6dea7c9 100644 --- a/logbook/_speedups.pyx +++ b/logbook/_speedups.pyx @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# cython: language_level=2 """ logbook._speedups ~~~~~~~~~~~~~~~~~ diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..1cec396 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,41 @@ +[build-system] +requires = ["setuptools", "Cython; python_implementation == 'CPython'"] +build-backend = "setuptools.build_meta" + +[project] +name = "Logbook" +license = { text = "BSD-3-Clause" } +authors = [ + { name = "Armin Ronacher", email = "armin.ronacher@active-4.com" }, + { name = "Georg Brandl" }, +] +description = "A logging replacement for Python" +readme = "README.md" +maintainers = [ + { name = "Frazer McLean", email = "frazer@frazermclean.co.uk" }, +] +classifiers = [ + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", +] +requires-python = ">=3.7" +dynamic = ["version"] + +[project.urls] +Documentation = "https://logbook.readthedocs.io" +"Source Code" = "https://github.com/getlogbook/logbook" + +[project.optional-dependencies] +test = ["pytest>4", "pytest-cov>=2.6"] +dev 
= ["Logbook[test]"] +execnet = ["execnet>=1.0.9"] +sqlalchemy = ["sqlalchemy"] +redis = ["redis"] +zmq = ["pyzmq"] +jinja = ["Jinja2"] +compression = ["brotli"] +all = ["Logbook[test,dev,execnet,sqlalchemy,redis,zmq,jinja,compression]"] diff --git a/setup.cfg b/setup.cfg index 60070cd..ca6025f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,10 @@ -[build_sphinx] -source-dir = docs/ -build-dir = docs/_build -all_files = 1 +[metadata] +version = attr: logbook.__version__ -[upload_docs] -upload-dir = docs/_build/html +[options] +packages = find: +zip_safe = False + +[options.packages.find] +exclude = + tests diff --git a/setup.py b/setup.py index 2f2a5e3..6355c98 100644 --- a/setup.py +++ b/setup.py @@ -1,245 +1,36 @@ -r""" -Logbook -------- - -An awesome logging implementation that is fun to use. - -Quickstart -`````````` - -:: - - from logbook import Logger - log = Logger('A Fancy Name') - - log.warn('Logbook is too awesome for most applications') - log.error("Can't touch this") - -Works for web apps too -`````````````````````` - -:: - - from logbook import MailHandler, Processor - - mailhandler = MailHandler(from_addr='servererror@example.com', - recipients=['admin@example.com'], - level='ERROR', format_string=u'''\ - Subject: Application Error for {record.extra[path]} [{record.extra[method]}] - - Message type: {record.level_name} - Location: {record.filename}:{record.lineno} - Module: {record.module} - Function: {record.func_name} - Time: {record.time:%Y-%m-%d %H:%M:%S} - Remote IP: {record.extra[ip]} - Request: {record.extra[path]} [{record.extra[method]}] - - Message: - - {record.message} - ''') - - def handle_request(request): - def inject_extra(record, handler): - record.extra['ip'] = request.remote_addr - record.extra['method'] = request.method - record.extra['path'] = request.path - - with Processor(inject_extra): - with mailhandler: - # execute code that might fail in the context of the - # request. 
-""" - import os import platform -import sys -from itertools import chain - -from distutils.command.build_ext import build_ext -from distutils.errors import ( - CCompilerError, DistutilsExecError, DistutilsPlatformError) -from setuptools import Distribution as _Distribution, Extension, setup -from setuptools.command.test import test as TestCommand - -cmdclass = {} -if sys.version_info < (2, 6): - raise Exception('Logbook requires Python 2.6 or higher.') - -cpython = platform.python_implementation() == 'CPython' - -ext_modules = [Extension('logbook._speedups', sources=['logbook/_speedups.c'])] - -ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError) -if sys.platform == 'win32': - # 2.6's distutils.msvc9compiler can raise an IOError when failing to - # find the compiler - ext_errors += (IOError,) - - -class BuildFailed(Exception): - def __init__(self): - self.cause = sys.exc_info()[1] # work around py 2/3 different syntax - - -class ve_build_ext(build_ext): - """This class allows C extension building to fail.""" - def run(self): - try: - build_ext.run(self) - except DistutilsPlatformError: - raise BuildFailed() +from setuptools import Extension, setup - def build_extension(self, ext): - try: - build_ext.build_extension(self, ext) - except ext_errors: - raise BuildFailed() - except ValueError: - # this can happen on Windows 64 bit, see Python issue 7511 - if "'path'" in str(sys.exc_info()[1]): # works with both py 2/3 - raise BuildFailed() - raise -cmdclass['build_ext'] = ve_build_ext - - -class Distribution(_Distribution): - - def has_ext_modules(self): - # We want to always claim that we have ext_modules. This will be fine - # if we don't actually have them (such as on PyPy) because nothing - # will get built, however we don't want to provide an overally broad - # Wheel package when building a wheel without C support. This will - # ensure that Wheel knows to treat us as if the build output is - # platform specific. 
- return True - - -class PyTest(TestCommand): - # from https://pytest.org/latest/goodpractises.html\ - # #integration-with-setuptools-test-commands - user_options = [('pytest-args=', 'a', 'Arguments to pass to py.test')] - - default_options = ['tests'] - - def initialize_options(self): - TestCommand.initialize_options(self) - self.pytest_args = '' - - def finalize_options(self): - TestCommand.finalize_options(self) - self.test_args = [] - self.test_suite = True - - def run_tests(self): - # import here, cause outside the eggs aren't loaded - import pytest - errno = pytest.main( - ' '.join(self.default_options) + ' ' + self.pytest_args) - sys.exit(errno) - -cmdclass['test'] = PyTest +IS_CPYTHON = platform.python_implementation() == "CPython" +DISABLE_EXTENSION = bool(os.environ.get("DISABLE_LOGBOOK_CEXT")) def status_msgs(*msgs): - print('*' * 75) + print("*" * 75) for msg in msgs: print(msg) - print('*' * 75) - -version_file_path = os.path.join( - os.path.dirname(__file__), 'logbook', '__version__.py') - -with open(version_file_path) as version_file: - exec(version_file.read()) # pylint: disable=W0122 - -extras_require = dict() -if sys.version_info[:2] < (3, 0): - extras_require['test'] = set(['pytest', 'pytest-cov<2.6']) -else: - extras_require['test'] = set(['pytest>4.0', 'pytest-cov>=2.6']) - -if sys.version_info[:2] < (3, 3): - extras_require['test'] |= set(['mock']) - -extras_require['dev'] = set(['cython']) | extras_require['test'] - -extras_require['execnet'] = set(['execnet>=1.0.9']) -extras_require['sqlalchemy'] = set(['sqlalchemy']) -extras_require['redis'] = set(['redis']) -extras_require['zmq'] = set(['pyzmq']) -extras_require['jinja'] = set(['Jinja2']) -extras_require['compression'] = set(['brotli']) - -extras_require['all'] = set(chain.from_iterable(extras_require.values())) + print("*" * 75) -def run_setup(with_cext): - kwargs = {} - if with_cext: - kwargs['ext_modules'] = ext_modules - else: - kwargs['ext_modules'] = [] - - setup( - name='Logbook', - 
version=__version__, - license='BSD', - url='http://logbook.pocoo.org/', - author='Armin Ronacher, Georg Brandl', - author_email='armin.ronacher@active-4.com', - description='A logging replacement for Python', - long_description=__doc__, - packages=['logbook'], - zip_safe=False, - platforms='any', - cmdclass=cmdclass, - tests_require=['pytest'], - classifiers=[ - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - - ], - extras_require=extras_require, - distclass=Distribution, - **kwargs - ) - -if not cpython: - run_setup(False) +if not IS_CPYTHON: status_msgs( - 'WARNING: C extensions are not supported on ' + - 'this Python platform, speedups are not enabled.', - 'Plain-Python build succeeded.' + "WARNING: C extensions are not supported on this Python platform, " + "speedups are not enabled.", ) -elif os.environ.get('DISABLE_LOGBOOK_CEXT'): - run_setup(False) + ext_modules = [] +elif DISABLE_EXTENSION: status_msgs( - 'DISABLE_LOGBOOK_CEXT is set; ' + - 'not attempting to build C extensions.', - 'Plain-Python build succeeded.' + "DISABLE_LOGBOOK_CEXT is set; not attempting to build C extensions.", ) + ext_modules = [] else: - try: - run_setup(True) - except BuildFailed as exc: - status_msgs( - exc.cause, - 'WARNING: The C extension could not be compiled, ' + - 'speedups are not enabled.', - 'Failure information, if any, is above.', - 'Retrying the build without the C extension now.' - ) - - run_setup(False) + from Cython.Build import cythonize + ext_modules = cythonize( + [Extension("logbook._speedups", sources=["logbook/_speedups.pyx"])], + language_level=3, + ) - status_msgs( - 'WARNING: The C extension could not be compiled, ' + - 'speedups are not enabled.', - 'Plain-Python build succeeded.' 
- ) +setup(ext_modules=ext_modules) From b83589043610a97e954447f321acab23024d4bbf Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sat, 10 Jun 2023 00:32:34 +0200 Subject: [PATCH 03/70] Initial GitHub actions setup tox environment variables aren't available when the package is installed, so I've removed the check on whether speedups are available. It's worth noting that the speedups extension isn't allowed to fail anymore anyway. --- .github/workflows/main.yml | 35 +++++++++++++++++++++++++++++++++++ MANIFEST.in | 1 - pyproject.toml | 9 ++++++--- scripts/test_setup.py | 18 ------------------ scripts/travis_build.py | 18 ------------------ tox.ini | 38 +++++++++++++++++--------------------- 6 files changed, 58 insertions(+), 61 deletions(-) create mode 100644 .github/workflows/main.yml delete mode 100644 scripts/test_setup.py delete mode 100644 scripts/travis_build.py diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..e7f944b --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,35 @@ +--- +name: CI + +on: + push: + branches: ["develop", "master"] + pull_request: + branches: ["develop", "master"] + workflow_dispatch: + +jobs: + tests: + name: "Python ${{ matrix.python-version }}" + runs-on: "ubuntu-latest" + + strategy: + fail-fast: false + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + + steps: + - uses: "actions/checkout@v3" + - uses: "actions/setup-python@v4" + with: + python-version: "${{ matrix.python-version }}" + - name: "Install dependencies" + run: | + set -xe + python -VV + python -m site + python -m pip install --upgrade pip setuptools wheel + python -m pip install --upgrade tox tox-gh-actions + + - name: "Run tox targets for ${{ matrix.python-version }}" + run: "python -m tox" diff --git a/MANIFEST.in b/MANIFEST.in index 9480321..c755652 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,2 @@ include MANIFEST.in Makefile CHANGES logbook/_speedups.pyx tox.ini LICENSE 
-include scripts/test_setup.py recursive-include tests * diff --git a/pyproject.toml b/pyproject.toml index 1cec396..037cd75 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,12 +30,15 @@ Documentation = "https://logbook.readthedocs.io" "Source Code" = "https://github.com/getlogbook/logbook" [project.optional-dependencies] -test = ["pytest>4", "pytest-cov>=2.6"] -dev = ["Logbook[test]"] +test = ["pytest>=6"] +dev = ["Logbook[test]", "tox>=4"] execnet = ["execnet>=1.0.9"] sqlalchemy = ["sqlalchemy"] redis = ["redis"] zmq = ["pyzmq"] jinja = ["Jinja2"] compression = ["brotli"] -all = ["Logbook[test,dev,execnet,sqlalchemy,redis,zmq,jinja,compression]"] +all = ["Logbook[execnet,sqlalchemy,redis,zmq,jinja,compression]"] + +[tool.pytest.ini_options] +testpaths = ["tests"] diff --git a/scripts/test_setup.py b/scripts/test_setup.py deleted file mode 100644 index f7b62c2..0000000 --- a/scripts/test_setup.py +++ /dev/null @@ -1,18 +0,0 @@ -#! /usr/bin/python -from pip._internal import main as pip_main -import sys - -if __name__ == '__main__': - python_version = sys.version_info - - deps = [ - "execnet>=1.0.9", - "pytest", - "pyzmq", - "sqlalchemy", - "Jinja2", - ] - - print("Setting up dependencies...") - result = pip_main(["install"] + deps) - sys.exit(result) diff --git a/scripts/travis_build.py b/scripts/travis_build.py deleted file mode 100644 index db3f794..0000000 --- a/scripts/travis_build.py +++ /dev/null @@ -1,18 +0,0 @@ -#! 
/usr/bin/python -from __future__ import print_function -import ast -import os -import subprocess -import sys - -_PYPY = hasattr(sys, "pypy_version_info") - -if __name__ == '__main__': - use_cython = ast.literal_eval(os.environ["USE_CYTHON"]) - if use_cython and _PYPY: - print("PyPy+Cython configuration skipped") - else: - sys.exit( - subprocess.call( - "make cybuild test" if use_cython else "make test", shell=True) - ) diff --git a/tox.ini b/tox.ini index 7c21509..dd1f1db 100644 --- a/tox.ini +++ b/tox.ini @@ -1,32 +1,28 @@ [tox] -envlist = py{27,35,36,37}{,-speedups},pypy,py37-docs -skipsdist = True +envlist = py{37,38,39,310,311}{,-nospeedups},pypy,docs [testenv] -whitelist_externals = - rm -deps = - py{27}: mock - pytest - speedups: Cython -setenv = - !speedups: DISABLE_LOGBOOK_CEXT=1 - !speedups: DISABLE_LOGBOOK_CEXT_AT_RUNTIME=1 -changedir = {toxinidir} +extras = + all + test +set_env = + nospeedups: DISABLE_LOGBOOK_CEXT_AT_RUNTIME=1 commands = - {envpython} -m pip install -e {toxinidir}[all] - - # Make sure that speedups are available/not available, as needed. - speedups: {envpython} -c "from logbook.base import _has_speedups; exit(0 if _has_speedups else 1)" - !speedups: {envpython} -c "from logbook.base import _has_speedups; exit(1 if _has_speedups else 0)" + pytest {posargs} - {envpython} {toxinidir}/scripts/test_setup.py - py.test {toxinidir}/tests - -[testenv:py37-docs] +[testenv:docs] +basepython = python3.7 deps = Sphinx>=1.3 changedir = docs commands = sphinx-build -W -b html . _build/html sphinx-build -W -b linkcheck . _build/linkcheck + +[gh-actions] +python = + 3.7: py37, docs + 3.8: py38 + 3.9: py39 + 3.10: py310 + 3.11: py311 From bc6c56ac2eef9652feaffa917a4d23c3c33e6de7 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sat, 17 Jun 2023 01:10:31 +0200 Subject: [PATCH 04/70] Set up redis on GitHub Actions I've set up the environment variables for general CI usage, even though in this case the defaults work. 
--- .github/workflows/main.yml | 11 +++++++++++ tests/test_queues.py | 9 ++++++--- tox.ini | 3 +++ 3 files changed, 20 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e7f944b..94b5946 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -13,6 +13,17 @@ jobs: name: "Python ${{ matrix.python-version }}" runs-on: "ubuntu-latest" + services: + redis: + image: redis + ports: + - 6379:6379 + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + strategy: fail-fast: false matrix: diff --git a/tests/test_queues.py b/tests/test_queues.py index 63bf015..1c6babd 100644 --- a/tests/test_queues.py +++ b/tests/test_queues.py @@ -10,6 +10,9 @@ import pytest +REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost') +REDIS_PORT = int(os.environ.get('REDIS_PORT', '6379')) + @require_module('zmq') def test_zeromq_handler(logger, handlers, subscriber): @@ -225,7 +228,7 @@ def test_redis_handler(): KEY = 'redis-{}'.format(os.getpid()) FIELDS = ['message', 'host'] - r = redis.Redis(decode_responses=True) + r = redis.Redis(REDIS_HOST, REDIS_PORT, decode_responses=True) redis_handler = RedisHandler(key=KEY, level=logbook.INFO, bubble=True) # We don't want output for the tests, so we can wrap everything in a # NullHandler @@ -304,7 +307,7 @@ def test_redis_handler_lpush(): time.sleep(1.5) - r = redis.Redis(decode_responses=True) + r = redis.Redis(REDIS_HOST, REDIS_PORT, decode_responses=True) logs = r.lrange(KEY, 0, -1) assert logs assert "new item" in logs[0] @@ -332,7 +335,7 @@ def test_redis_handler_rpush(): time.sleep(1.5) - r = redis.Redis(decode_responses=True) + r = redis.Redis(REDIS_HOST, REDIS_PORT, decode_responses=True) logs = r.lrange(KEY, 0, -1) assert logs assert "old item" in logs[0] diff --git a/tox.ini b/tox.ini index dd1f1db..58b2d3c 100644 --- a/tox.ini +++ b/tox.ini @@ -7,6 +7,9 @@ extras = test set_env = nospeedups: 
DISABLE_LOGBOOK_CEXT_AT_RUNTIME=1 +pass_env = + REDIS_HOST + REDIS_PORT commands = pytest {posargs} From 5472be9f5b6d4f2960d9fc3337a34dd30b3f622a Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sat, 17 Jun 2023 02:34:01 +0200 Subject: [PATCH 05/70] Test on macOS and Windows --- .github/workflows/main.yml | 55 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 53 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 94b5946..ad26e56 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -9,8 +9,8 @@ on: workflow_dispatch: jobs: - tests: - name: "Python ${{ matrix.python-version }}" + linux: + name: "Linux (${{ matrix.python-version }})" runs-on: "ubuntu-latest" services: @@ -44,3 +44,54 @@ jobs: - name: "Run tox targets for ${{ matrix.python-version }}" run: "python -m tox" + + windows: + name: "Windows (${{ matrix.python-version }}, ${{ matrix.arch }})" + runs-on: "windows-latest" + + strategy: + fail-fast: false + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + arch: ["x86", "x64"] + + steps: + - uses: "actions/checkout@v3" + - uses: "actions/setup-python@v4" + with: + python-version: "${{ matrix.python-version }}" + architecture: "${{ matrix.arch }}" + + - run: python -VV + - run: python -m site + - run: python -m pip install --upgrade pip setuptools wheel + - run: python -m pip install --upgrade tox tox-gh-actions + + - name: "Run tox targets for ${{ matrix.python-version }} on ${{ matrix.arch }}" + run: "python -m tox -- -k 'not redis'" + + macos: + name: "macOS (${{ matrix.python-version }})" + runs-on: "macos-latest" + + strategy: + fail-fast: false + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] + + steps: + - uses: "actions/checkout@v3" + - uses: "actions/setup-python@v4" + with: + python-version: "${{ matrix.python-version }}" + architecture: "${{ matrix.arch }}" + - name: "Install dependencies" + run: | + set -xe + python -VV + python -m site 
+ python -m pip install --upgrade pip setuptools wheel + python -m pip install --upgrade tox tox-gh-actions + + - name: "Run tox targets for ${{ matrix.python-version }}" + run: "python -m tox -- -k 'not redis'" From 0fa73ccdec0e7d9f4bcd5b442ead1f3ff3a12053 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sat, 24 Jun 2023 00:56:14 +0200 Subject: [PATCH 06/70] Fix docs build Co-authored-by: James Tocknell --- CHANGES | 2 +- README.md | 2 +- docs/api/base.rst | 2 +- docs/api/compat.rst | 2 +- docs/api/handlers.rst | 2 +- docs/api/internal.rst | 4 ++-- docs/api/more.rst | 2 +- docs/api/notifiers.rst | 2 +- docs/api/queues.rst | 2 +- docs/api/ticketing.rst | 2 +- docs/api/utilities.rst | 4 ++-- docs/conf.py | 2 +- docs/index.rst | 6 +++--- docs/quickstart.rst | 6 ++---- docs/ticketing.rst | 2 +- logbook/base.py | 5 ++--- logbook/more.py | 6 +++--- logbook/queues.py | 2 +- 18 files changed, 26 insertions(+), 29 deletions(-) diff --git a/CHANGES b/CHANGES index 4f2c3ac..f990cd4 100644 --- a/CHANGES +++ b/CHANGES @@ -223,7 +223,7 @@ Version 0.4.1 Released on December 12th. Codename "121212" - Fixed several outstanding encoding problems, thanks to @dvarazzo. -- Merged in minor pull requests (see https://github.com/mitsuhiko/logbook/pulls?&state=closed) +- Merged in minor pull requests (see https://github.com/getlogbook/logbook/pulls?q=is%3Aclosed) Version 0.4 ----------- diff --git a/README.md b/README.md index 7be09b1..0f864ef 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Logbook is a nice logging replacement. 
It should be easy to setup, use and configure and support web applications :) -For more information: http://logbook.readthedocs.org +For more information: https://logbook.readthedocs.org [ti]: https://secure.travis-ci.org/getlogbook/logbook.svg?branch=master [tl]: https://travis-ci.org/getlogbook/logbook diff --git a/docs/api/base.rst b/docs/api/base.rst index 547430a..c7793ee 100644 --- a/docs/api/base.rst +++ b/docs/api/base.rst @@ -3,7 +3,7 @@ Core Interface This implements the core interface. -.. module:: logbook +.. currentmodule:: logbook .. autoclass:: Logger :members: diff --git a/docs/api/compat.rst b/docs/api/compat.rst index 15ca267..c6d738e 100644 --- a/docs/api/compat.rst +++ b/docs/api/compat.rst @@ -4,7 +4,7 @@ Compatibility This documents compatibility support with existing systems such as :mod:`logging` and :mod:`warnings`. -.. module:: logbook.compat +.. currentmodule:: logbook.compat Logging Compatibility --------------------- diff --git a/docs/api/handlers.rst b/docs/api/handlers.rst index ac27554..f4690af 100644 --- a/docs/api/handlers.rst +++ b/docs/api/handlers.rst @@ -6,7 +6,7 @@ handlers. There are additional handlers for special purposes in the :mod:`logbook.more`, :mod:`logbook.ticketing` and :mod:`logbook.queues` modules. -.. module:: logbook +.. currentmodule:: logbook Base Interface -------------- diff --git a/docs/api/internal.rst b/docs/api/internal.rst index c1aa0d0..9442b31 100644 --- a/docs/api/internal.rst +++ b/docs/api/internal.rst @@ -4,7 +4,7 @@ Internal API This documents the internal API that might be useful for more advanced setups or custom handlers. -.. module:: logbook.base +.. currentmodule:: logbook.base .. autofunction:: dispatch_record @@ -18,7 +18,7 @@ setups or custom handlers. :members: :inherited-members: -.. module:: logbook.handlers +.. currentmodule:: logbook.handlers .. 
autoclass:: StringFormatterHandlerMixin :members: diff --git a/docs/api/more.rst b/docs/api/more.rst index 738b995..eb06aec 100644 --- a/docs/api/more.rst +++ b/docs/api/more.rst @@ -6,7 +6,7 @@ beyond the scope of Logbook itself or depend on external libraries. Additionally there are some handlers in :mod:`logbook.ticketing`, :mod:`logbook.queues` and :mod:`logbook.notifiers`. -.. module:: logbook.more +.. currentmodule:: logbook.more Tagged Logging -------------- diff --git a/docs/api/notifiers.rst b/docs/api/notifiers.rst index 8a1a404..4dec4c2 100644 --- a/docs/api/notifiers.rst +++ b/docs/api/notifiers.rst @@ -8,7 +8,7 @@ that depend on external libraries. The more module implements special handlers and other things that are beyond the scope of Logbook itself or depend on external libraries. -.. module:: logbook.notifiers +.. currentmodule:: logbook.notifiers .. autofunction:: create_notification_handler diff --git a/docs/api/queues.rst b/docs/api/queues.rst index 3e961b9..b7ab8a3 100644 --- a/docs/api/queues.rst +++ b/docs/api/queues.rst @@ -6,7 +6,7 @@ system. This is useful for distributed setups where you want multiple processes to log to the same backend. Currently supported are ZeroMQ as well as the :mod:`multiprocessing` :class:`~multiprocessing.Queue` class. -.. module:: logbook.queues +.. currentmodule:: logbook.queues ZeroMQ ------ diff --git a/docs/api/ticketing.rst b/docs/api/ticketing.rst index 5628e97..e39359a 100644 --- a/docs/api/ticketing.rst +++ b/docs/api/ticketing.rst @@ -6,7 +6,7 @@ log records are categorized by location and for every emitted log record a count is added. That way you know how often certain messages are triggered, at what times and when the last occurrence was. -.. module:: logbook.ticketing +.. currentmodule:: logbook.ticketing .. 
autoclass:: TicketingBaseHandler :members: diff --git a/docs/api/utilities.rst b/docs/api/utilities.rst index 22d35e3..2aa1be7 100644 --- a/docs/api/utilities.rst +++ b/docs/api/utilities.rst @@ -6,7 +6,7 @@ Misc. Utilities This documents general purpose utility functions available in Logbook. -.. module:: logbook +.. currentmodule:: logbook .. autofunction:: debug @@ -33,7 +33,7 @@ This documents general purpose utility functions available in Logbook. Slow Operations Logging ----------------------- -.. module:: logbook.utils +.. currentmodule:: logbook.utils .. autofunction:: logged_if_slow diff --git a/docs/conf.py b/docs/conf.py index aa90731..6409134 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -224,5 +224,5 @@ ] intersphinx_mapping = { - 'http://docs.python.org': None + "python": ("https://docs.python.org/3", None), } diff --git a/docs/index.rst b/docs/index.rst index 9d40b5d..a93d260 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,7 +7,7 @@ in mind and the idea to make logging fun: >>> from logbook import Logger, StreamHandler >>> import sys ->>> StreamHandler(sys.stdout).push_application() +>>> StreamHandler(sys.stdout).push_application() >>> log = Logger('Logbook') >>> log.info('Hello, World!') [2015-10-05 18:55:56.937141] INFO: Logbook: Hello, World! @@ -51,6 +51,6 @@ Project Information * `Mailing list`_ * IRC: ``#pocoo`` on freenode -.. _Download from PyPI: https://pypi.org/pypi/Logbook +.. _Download from PyPI: https://pypi.org/project/Logbook .. _Master repository on GitHub: https://github.com/getlogbook/logbook -.. _Mailing list: http://groups.google.com/group/pocoo-libs +.. 
_Mailing list: https://groups.google.com/g/pocoo-libs diff --git a/docs/quickstart.rst b/docs/quickstart.rst index dc914ee..2782b84 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -8,7 +8,7 @@ class, create yourself a logger and you are set: >>> from logbook import Logger, StreamHandler >>> import sys ->>> StreamHandler(sys.stdout).push_application() +>>> StreamHandler(sys.stdout).push_application() >>> log = Logger('My Awesome Logger') >>> log.warn('This is too cool for stdlib') [2015-10-05 19:02:03.575723] WARNING: My Awesome Logger: This is too cool for stdlib @@ -71,15 +71,13 @@ On top of those there are a couple of handlers for special use cases: * :class:`logbook.notifiers.GrowlHandler` and :class:`logbook.notifiers.LibNotifyHandler` for logging to the OS X Growl or the linux notification daemon. -* :class:`logbook.notifiers.BoxcarHandler` for logging to `boxcar`_. +* :class:`logbook.notifiers.BoxcarHandler` for logging to boxcar.io. * :class:`logbook.more.TwitterHandler` for logging to twitter. * :class:`logbook.more.ExternalApplicationHandler` for logging to an external application such as the OS X ``say`` command. * :class:`logbook.ticketing.TicketingHandler` for creating tickets from log records in a database or other data store. -.. _boxcar: http://boxcar.io/ - Registering Handlers -------------------- diff --git a/docs/ticketing.rst b/docs/ticketing.rst index 0252ad3..dde6b5f 100644 --- a/docs/ticketing.rst +++ b/docs/ticketing.rst @@ -65,4 +65,4 @@ Alternative backends can be swapped in by providing the `backend` parameter. There is a second implementation of a backend that is using MongoDB: :class:`~logbook.ticketing.MongoDBBackend`. -.. _SQLAlchemy: http://sqlalchemy.org/ +.. 
_SQLAlchemy: https://www.sqlalchemy.org/ diff --git a/logbook/base.py b/logbook/base.py index 15fa831..fac89db 100644 --- a/logbook/base.py +++ b/logbook/base.py @@ -60,8 +60,7 @@ def set_datetime_format(datetime_format): :py:obj:`datetime_format` (possibly time zone aware) This function defaults to creating datetime objects in UTC time, - using `datetime.utcnow() - `_, + using :func:`datetime.utcnow`, so that logbook logs all times in UTC time by default. This is recommended in case you have multiple software modules or instances running in different servers in different time zones, as @@ -79,7 +78,7 @@ def set_datetime_format(datetime_format): logbook.set_datetime_format("local") Other uses rely on your supplied :py:obj:`datetime_format`. - Using `pytz `_ for example:: + Using `pytz `_ for example:: from datetime import datetime import logbook diff --git a/logbook/more.py b/logbook/more.py index 3809cc0..50a23be 100644 --- a/logbook/more.py +++ b/logbook/more.py @@ -164,7 +164,7 @@ class TwitterHandler(Handler, StringFormatterHandlerMixin): If you don't want to register your own application and request xauth credentials, there are a couple of leaked consumer key and secret pairs from application explicitly whitelisted at Twitter - (`leaked secrets `_). + (`leaked secrets `_). """ default_format_string = TWITTER_FORMAT_STRING formatter_class = TwitterFormatter @@ -327,7 +327,7 @@ class ColorizingStreamHandlerMixin(object): .. versionchanged:: 1.0.0 Added Windows support if `colorama`_ is installed. - .. _`colorama`: https://pypi.org/pypi/colorama + .. _`colorama`: https://pypi.org/project/colorama """ _use_color = None @@ -383,7 +383,7 @@ class ColorizedStderrHandler(ColorizingStreamHandlerMixin, StderrHandler): .. versionchanged:: 1.0 Added Windows support if `colorama`_ is installed. - .. _`colorama`: https://pypi.org/pypi/colorama + .. 
_`colorama`: https://pypi.org/project/colorama """ def __init__(self, *args, **kwargs): StderrHandler.__init__(self, *args, **kwargs) diff --git a/logbook/queues.py b/logbook/queues.py index 162615b..0b76f0a 100644 --- a/logbook/queues.py +++ b/logbook/queues.py @@ -149,7 +149,7 @@ class MessageQueueHandler(Handler): Several other backends are also supported. Refer to the `kombu`_ documentation - .. _kombu: http://kombu.readthedocs.org/en/latest/introduction.html + .. _kombu: https://docs.celeryq.dev/projects/kombu/en/latest/introduction.html """ def __init__(self, uri=None, queue='logging', level=NOTSET, From c1e9cca3d83f5d685dfb81cbe65c6ba76670c6a1 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 21:50:11 +0200 Subject: [PATCH 07/70] Add pre-commit config --- .pre-commit-config.yaml | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..ab2ba56 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-toml + - id: check-yaml + - id: check-added-large-files + - id: check-merge-conflict + - id: end-of-file-fixer + - id: trailing-whitespace + args: [--markdown-linebreak-ext=md] + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v2.7.1 + hooks: + - id: prettier + - repo: https://github.com/asottile/pyupgrade + rev: v3.7.0 + hooks: + - id: pyupgrade + args: [--py37-plus] + - repo: https://github.com/timothycrosley/isort + rev: 5.12.0 + hooks: + - id: isort + - repo: https://github.com/psf/black + rev: 23.3.0 + hooks: + - id: black From e5ba705368a46735466af786a82b1b1b10bb7257 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 21:50:56 +0200 Subject: [PATCH 08/70] pre-commit: end-of-file-fixer --- .appveyor/after_test.bat | 2 +- .appveyor/build.cmd | 2 +- AUTHORS | 1 
- docs/api/utilities.rst | 1 - tests/test_mail_handler.py | 6 ------ 5 files changed, 2 insertions(+), 10 deletions(-) diff --git a/.appveyor/after_test.bat b/.appveyor/after_test.bat index c3ea121..cf8a22b 100644 --- a/.appveyor/after_test.bat +++ b/.appveyor/after_test.bat @@ -3,4 +3,4 @@ IF DEFINED CYBUILD ( IF "%APPVEYOR_REPO_TAG%"=="true" ( twine upload -u %PYPI_USERNAME% -p %PYPI_PASSWORD% dist\*.whl ) -) \ No newline at end of file +) diff --git a/.appveyor/build.cmd b/.appveyor/build.cmd index 75ac073..243dc9a 100644 --- a/.appveyor/build.cmd +++ b/.appveyor/build.cmd @@ -18,4 +18,4 @@ IF "%DISTUTILS_USE_SDK%"=="1" ( ECHO Using default MSVC build environment ) -CALL %* \ No newline at end of file +CALL %* diff --git a/AUTHORS b/AUTHORS index 46b5378..65bb614 100644 --- a/AUTHORS +++ b/AUTHORS @@ -17,4 +17,3 @@ Contributors: - Raphaël Vinot - Rotem Yaari - Frazer McLean - diff --git a/docs/api/utilities.rst b/docs/api/utilities.rst index 2aa1be7..69afefd 100644 --- a/docs/api/utilities.rst +++ b/docs/api/utilities.rst @@ -43,4 +43,3 @@ Deprecations .. autofunction:: deprecated ..
autofunction:: suppressed_deprecations - diff --git a/tests/test_mail_handler.py b/tests/test_mail_handler.py index 718d936..9f1fc48 100644 --- a/tests/test_mail_handler.py +++ b/tests/test_mail_handler.py @@ -226,9 +226,3 @@ def test_mail_handler_arguments(): 'server.example.com', 465, keyfile=None, certfile=None) assert mock_smtp_ssl.method_calls[1] == call().login( user='username', password='password') - - - - - - From 8f5be7b46e220a1cfd58e3787f9a5dbc31b27561 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 21:51:15 +0200 Subject: [PATCH 09/70] pre-commit: trailing-whitespace --- logbook/handlers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/logbook/handlers.py b/logbook/handlers.py index 9c28a83..4fcc5ff 100644 --- a/logbook/handlers.py +++ b/logbook/handlers.py @@ -1592,7 +1592,7 @@ def wrap_segments(self, record, before): segments = [segment for segment in msg.split(self.record_delimiter)] return (before + segment + self.record_delimiter for segment in segments) - + def unix_envelope(self, record): before = u'<{}>{}'.format( self.encode_priority(record), From 568a4ea286fb1c6c57eedf734ff2bd612cb35b82 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 21:55:02 +0200 Subject: [PATCH 10/70] pre-commit: prettier --- .pre-commit-config.yaml | 1 + .travis.yml | 44 ++++++++++++++++++++--------------------- README.md | 5 +---- appveyor.yml | 3 +-- 4 files changed, 25 insertions(+), 28 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ab2ba56..2ee3839 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,6 +13,7 @@ repos: rev: v2.7.1 hooks: - id: prettier + exclude: docs/sheet/.*\.html$ - repo: https://github.com/asottile/pyupgrade rev: v3.7.0 hooks: diff --git a/.travis.yml b/.travis.yml index d924fb1..3ee1867 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,39 +4,39 @@ addons: apt: sources: - chris-lea-redis-server - - sourceline: 'ppa:chris-lea/zeromq' + - 
sourceline: "ppa:chris-lea/zeromq" packages: - - redis-server - - libzmq3-dev + - redis-server + - libzmq3-dev services: -- redis-server + - redis-server python: -- '2.7' -- '3.5' -- '3.6' -- '3.7' + - "2.7" + - "3.5" + - "3.6" + - "3.7" before_install: - pip install coveralls install: -- pip install -U pip -- pip install cython -- cython logbook/_speedups.pyx + - pip install -U pip + - pip install cython + - cython logbook/_speedups.pyx env: -- DISABLE_LOGBOOK_CEXT=True -- CYBUILD=True + - DISABLE_LOGBOOK_CEXT=True + - CYBUILD=True script: -- pip install -e .[all] -- if [[ $GEVENT == 'True' ]] ; then pip install gevent; fi -- pytest --cov=logbook -r s tests + - pip install -e .[all] + - if [[ $GEVENT == 'True' ]] ; then pip install gevent; fi + - pytest --cov=logbook -r s tests matrix: exclude: include: - - python: "3.6" - env: GEVENT=True CYBUILD=True - - python: "2.7" - env: GEVENT=True CYBUILD=True + - python: "3.6" + env: GEVENT=True CYBUILD=True + - python: "2.7" + env: GEVENT=True CYBUILD=True after_success: - coveralls @@ -44,10 +44,10 @@ after_success: notifications: email: recipients: - - vmalloc@gmail.com + - vmalloc@gmail.com irc: channels: - - chat.freenode.net#pocoo + - chat.freenode.net#pocoo on_success: change on_failure: always use_notice: true diff --git a/README.md b/README.md index 0f864ef..d68501e 100644 --- a/README.md +++ b/README.md @@ -2,17 +2,14 @@ - - | | | -|--------------------|-----------------------------| +| ------------------ | --------------------------- | | Travis | [![Build Status][ti]][tl] | | AppVeyor | [![Build Status][ai]][al] | | Supported Versions | ![Supported Versions][vi] | | Latest Version | [![Latest Version][pi]][pl] | | Test Coverage | [![Test Coverage][ci]][cl] | - Logbook is a nice logging replacement. 
It should be easy to setup, use and configure and support web applications :) diff --git a/appveyor.yml b/appveyor.yml index 6f7ae9c..ff821af 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -14,7 +14,6 @@ environment: ENABLE_LOGBOOK_NTEVENTLOG_TESTS: "TRUE" matrix: - - PYTHON: "C:\\Python27" - PYTHON: "C:\\Python27" CYBUILD: "TRUE" @@ -67,7 +66,7 @@ artifacts: - path: dist\*.whl deploy: - description: '' + description: "" provider: GitHub auth_token: secure: 0yLUo/V+wwSvSFk9nBW/77RN9iTjJA1B5p/TM1XgVLPPFEZWkH756jyJ0FOmtJPt From f7d3d71cfbcb011f1c5aaf36a4ff78747f129412 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 21:56:16 +0200 Subject: [PATCH 11/70] pre-commit: pyupgrade --- benchmark/bench_file_handler_unicode.py | 2 +- .../bench_logging_file_handler_unicode.py | 2 +- benchmark/run.py | 3 +- docs/conf.py | 13 +++--- logbook/__init__.py | 1 - logbook/_fallback.py | 7 ++- logbook/_termcolors.py | 1 - logbook/base.py | 19 ++++---- logbook/compat.py | 5 +-- logbook/concurrency.py | 6 +-- logbook/handlers.py | 43 +++++++++---------- logbook/helpers.py | 5 +-- logbook/more.py | 19 ++++---- logbook/notifiers.py | 7 ++- logbook/queues.py | 9 ++-- logbook/ticketing.py | 5 +-- logbook/utils.py | 18 ++++---- scripts/make-release.py | 5 +-- tests/conftest.py | 4 +- tests/test_ci.py | 1 - tests/test_deadlock.py | 4 +- tests/test_file_handler.py | 6 +-- tests/test_helpers.py | 4 +- tests/test_mail_handler.py | 2 +- tests/test_more.py | 10 ++--- tests/test_nteventlog_handler.py | 3 +- tests/test_processors.py | 4 +- tests/test_queues.py | 21 +++++---- tests/test_syslog_handler.py | 4 +- tests/test_unicode.py | 5 +-- tests/test_utils.py | 6 +-- tests/utils.py | 3 +- 32 files changed, 113 insertions(+), 134 deletions(-) diff --git a/benchmark/bench_file_handler_unicode.py b/benchmark/bench_file_handler_unicode.py index 255b8aa..2cee8e1 100644 --- a/benchmark/bench_file_handler_unicode.py +++ b/benchmark/bench_file_handler_unicode.py @@ -10,4 +10,4 @@ def 
run(): f = NamedTemporaryFile() with FileHandler(f.name) as handler: for x in xrange(500): - log.warning(u'this is handled \x6f') + log.warning('this is handled \x6f') diff --git a/benchmark/bench_logging_file_handler_unicode.py b/benchmark/bench_logging_file_handler_unicode.py index 7bb87c1..8dfb18d 100644 --- a/benchmark/bench_logging_file_handler_unicode.py +++ b/benchmark/bench_logging_file_handler_unicode.py @@ -11,4 +11,4 @@ def run(): handler = FileHandler(f.name) log.addHandler(handler) for x in xrange(500): - log.warning(u'this is handled \x6f') + log.warning('this is handled \x6f') diff --git a/benchmark/run.py b/benchmark/run.py index da2e3e0..c114d10 100644 --- a/benchmark/run.py +++ b/benchmark/run.py @@ -2,7 +2,6 @@ """ Runs the benchmarks """ -from __future__ import print_function import sys import os import re @@ -41,7 +40,7 @@ def run_bench(name, use_gevent=False): def bench_wrapper(use_gevent=False): print('=' * 80) - print('Running benchmark with Logbook %s (gevent enabled=%s)' % (version, use_gevent)) + print(f'Running benchmark with Logbook {version} (gevent enabled={use_gevent})') print('-' * 80) os.chdir(bench_directory) for bench in list_benchmarks(): diff --git a/docs/conf.py b/docs/conf.py index 6409134..8f91a6e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # Logbook documentation build configuration file, created by # sphinx-quickstart on Fri Jul 23 16:54:49 2010. @@ -42,8 +41,8 @@ master_doc = 'index' # General information about the project. -project = u'Logbook' -copyright = u'2010, Armin Ronacher, Georg Brandl' +project = 'Logbook' +copyright = '2010, Armin Ronacher, Georg Brandl' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -186,8 +185,8 @@ # (source start file, target name, title, author, # documentclass [howto/manual]). 
latex_documents = [ - ('index', 'Logbook.tex', u'Logbook Documentation', - u'Armin Ronacher, Georg Brandl', 'manual'), + ('index', 'Logbook.tex', 'Logbook Documentation', + 'Armin Ronacher, Georg Brandl', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -219,8 +218,8 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'logbook', u'Logbook Documentation', - [u'Armin Ronacher, Georg Brandl'], 1) + ('index', 'logbook', 'Logbook Documentation', + ['Armin Ronacher, Georg Brandl'], 1) ] intersphinx_mapping = { diff --git a/logbook/__init__.py b/logbook/__init__.py index a8ffc81..1a7fdd8 100644 --- a/logbook/__init__.py +++ b/logbook/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook ~~~~~~~ diff --git a/logbook/_fallback.py b/logbook/_fallback.py index 10fed23..68e4e94 100644 --- a/logbook/_fallback.py +++ b/logbook/_fallback.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook._fallback ~~~~~~~~~~~~~~~~~ @@ -40,7 +39,7 @@ def _del(self): return property(_get, _set, _del) -class _StackBound(object): +class _StackBound: def __init__(self, obj, push, pop): self.__obj = obj @@ -55,7 +54,7 @@ def __exit__(self, exc_type, exc_value, tb): self.__pop() -class StackedObject(object): +class StackedObject: """Baseclass for all objects that provide stack manipulation operations. """ @@ -131,7 +130,7 @@ def applicationbound(self, _cls=_StackBound): return _cls(self, self.push_application, self.pop_application) -class ContextStackManager(object): +class ContextStackManager: """Helper class for context objects that manages a stack of objects. 
""" diff --git a/logbook/_termcolors.py b/logbook/_termcolors.py index 0c42b3e..dc08cfa 100644 --- a/logbook/_termcolors.py +++ b/logbook/_termcolors.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook._termcolors ~~~~~~~~~~~~~~~~~~~ diff --git a/logbook/base.py b/logbook/base.py index fac89db..cff8546 100644 --- a/logbook/base.py +++ b/logbook/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook.base ~~~~~~~~~~~~ @@ -125,7 +124,7 @@ def utc_tz(): TRACE: 'TRACE', NOTSET: 'NOTSET' } -_reverse_level_names = dict((v, k) for (k, v) in iteritems(_level_names)) +_reverse_level_names = {v: k for (k, v) in iteritems(_level_names)} _missing = object() @@ -174,7 +173,7 @@ def get_level_name(level): raise LookupError('unknown level') -class _ExceptionCatcher(object): +class _ExceptionCatcher: """Helper for exception caught blocks.""" def __init__(self, logger, args, kwargs): @@ -305,7 +304,7 @@ def process(self, record): self.callback(record) -class _InheritedType(object): +class _InheritedType: __slots__ = () def __repr__(self): @@ -367,7 +366,7 @@ def _create_log_record(cls, dict): return cls.from_dict(dict) -class LogRecord(object): +class LogRecord: """A LogRecord instance represents an event being logged. LogRecord instances are created every time something is logged. They @@ -431,7 +430,7 @@ def __init__(self, channel, level, msg, args=None, kwargs=None, #: data. # TODO: Replace the lambda with str when we remove support for python 2 - self.extra = defaultdict(lambda: u'', extra or ()) + self.extra = defaultdict(lambda: '', extra or ()) #: If available, optionally the interpreter frame that pulled the #: heavy init. This usually points to somewhere in the dispatcher. 
#: Might not be available for all calls and is removed when the log @@ -534,7 +533,7 @@ def update_from_dict(self, d): self.time = parse_iso8601(self.time) # TODO: Replace the lambda with str when we remove support for python 2` - self.extra = defaultdict(lambda: u'', self.extra) + self.extra = defaultdict(lambda: '', self.extra) return self def _format_message(self, msg, *args, **kwargs): @@ -735,7 +734,7 @@ def dispatcher(self): return self._dispatcher() -class LoggerMixin(object): +class LoggerMixin: """This mixin class defines and implements the "usual" logger interface (i.e. the descriptive logging functions). @@ -873,7 +872,7 @@ def _log(self, level, args, kwargs): exc_info, extra, frame_correction) -class RecordDispatcher(object): +class RecordDispatcher: """A record dispatcher is the internal base class that implements the logic used by the :class:`~logbook.Logger`. """ @@ -1025,7 +1024,7 @@ class Logger(RecordDispatcher, LoggerMixin): """ -class LoggerGroup(object): +class LoggerGroup: """A LoggerGroup represents a group of loggers. It cannot emit log messages on its own but it can be used to set the disabled flag and log level of all loggers in the group. diff --git a/logbook/compat.py b/logbook/compat.py index 602a11a..60f47d5 100644 --- a/logbook/compat.py +++ b/logbook/compat.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook.compat ~~~~~~~~~~~~~~ @@ -36,7 +35,7 @@ def redirect_logging(set_root_logger_level=True): logging.root.setLevel(logging.DEBUG) -class redirected_logging(object): +class redirected_logging: """Temporarily redirects logging for all threads and reverts it later to the old handlers. Mainly used by the internal unittests:: @@ -235,7 +234,7 @@ def redirect_warnings(): redirected_warnings().__enter__() -class redirected_warnings(object): +class redirected_warnings: """A context manager that copies and restores the warnings filter upon exiting the context, and logs warnings using the logbook system. 
diff --git a/logbook/concurrency.py b/logbook/concurrency.py index cd0fcb6..dd715c8 100644 --- a/logbook/concurrency.py +++ b/logbook/concurrency.py @@ -46,7 +46,7 @@ def is_gevent_enabled(): def thread_get_name(): return __threading__.currentThread().getName() - class GreenletRLock(object): + class GreenletRLock: def __init__(self): self._thread_local = thread_local() self._owner = None @@ -144,7 +144,7 @@ def thread_get_name(): greenlet_local = thread_local - class GreenletRLock(object): + class GreenletRLock: def acquire(self): pass @@ -195,7 +195,7 @@ def is_context_enabled(): return False else: - class ContextVar(object): + class ContextVar: def __init__(self, name): self.name = name self.local = thread_local() diff --git a/logbook/handlers.py b/logbook/handlers.py index 4fcc5ff..8abebae 100644 --- a/logbook/handlers.py +++ b/logbook/handlers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook.handlers ~~~~~~~~~~~~~~~~ @@ -295,9 +294,9 @@ def handle_error(self, record, exc_info): reraise(exc_info[0], exc_info[1], exc_info[2]) elif behaviour == 'print': traceback.print_exception(*(exc_info + (None, sys.stderr))) - sys.stderr.write('Logged from file %s, line %s\n' % ( + sys.stderr.write('Logged from file {}, line {}\n'.format( record.filename, record.lineno)) - except IOError: + except OSError: pass @@ -315,7 +314,7 @@ class NullHandler(Handler): blackhole = True def __init__(self, level=NOTSET, filter=None): - super(NullHandler, self).__init__(level=level, filter=filter, + super().__init__(level=level, filter=filter, bubble=False) @@ -345,7 +344,7 @@ def __setattr__(self, name, value): setattr(self.handler, name, value) -class StringFormatter(object): +class StringFormatter: """Many handlers format the log entries to text format. This is done by a callable that is passed a log record and returns an unicode string. 
The default formatter for this is implemented as a class so @@ -391,7 +390,7 @@ def __call__(self, record, handler): return line -class StringFormatterHandlerMixin(object): +class StringFormatterHandlerMixin: """A mixin for handlers that provides a default integration for the :class:`~logbook.StringFormatter` class. This is used for all handlers by default that log text to a destination. @@ -426,7 +425,7 @@ def _set_format_string(self, value): del _get_format_string, _set_format_string -class HashingHandlerMixin(object): +class HashingHandlerMixin: """Mixin class for handlers that are hashing records.""" def hash_record_raw(self, record): @@ -618,7 +617,7 @@ def __init__(self, filename, mode='a', encoding=None, level=NOTSET, def _open(self, mode=None): if mode is None: mode = self._mode - self.stream = io.open(self._filename, mode, encoding=self.encoding) + self.stream = open(self._filename, mode, encoding=self.encoding) def write(self, item): self.ensure_stream_is_open() @@ -653,7 +652,7 @@ def __init__(self, filename, encoding=None, level=NOTSET, format_string=None, delay=False, filter=None, bubble=False, compression_quality=9): self._compression_quality = compression_quality - super(GZIPCompressionHandler, self).__init__(filename, mode='wb', encoding=encoding, level=level, + super().__init__(filename, mode='wb', encoding=encoding, level=level, format_string=format_string, delay=delay, filter=filter, bubble=bubble) def _open(self, mode=None): @@ -677,7 +676,7 @@ class BrotliCompressionHandler(FileHandler): def __init__(self, filename, encoding=None, level=NOTSET, format_string=None, delay=False, filter=None, bubble=False, compression_window_size=4*1024**2, compression_quality=11): - super(BrotliCompressionHandler, self).__init__(filename, mode='wb', encoding=encoding, level=level, + super().__init__(filename, mode='wb', encoding=encoding, level=level, format_string=format_string, delay=delay, filter=filter, bubble=bubble) try: from brotli import Compressor @@ 
-691,7 +690,7 @@ def __init__(self, filename, encoding=None, level=NOTSET, def _open(self, mode=None): if mode is None: mode = self._mode - self.stream = io.open(self._filename, mode) + self.stream = open(self._filename, mode) def write(self, item): if isinstance(item, str): @@ -700,7 +699,7 @@ def write(self, item): if ret: self.ensure_stream_is_open() self.stream.write(ret) - super(BrotliCompressionHandler, self).flush() + super().flush() def should_flush(self): return False @@ -711,14 +710,14 @@ def flush(self): if ret: self.ensure_stream_is_open() self.stream.write(ret) - super(BrotliCompressionHandler, self).flush() + super().flush() def close(self): if self._compressor is not None: self.ensure_stream_is_open() self.stream.write(self._compressor.finish()) self._compressor = None - super(BrotliCompressionHandler, self).close() + super().close() class MonitoringFileHandler(FileHandler): @@ -1321,7 +1320,7 @@ def collapse_mails(self, mail, related, reason): title = 'Other log records in the same group' else: title = 'Log records that led up to this one' - mail.set_payload('%s\r\n\r\n\r\n%s:\r\n\r\n%s' % ( + mail.set_payload('{}\r\n\r\n\r\n{}:\r\n\r\n{}'.format( mail.get_payload(), title, '\r\n\r\n'.join(body.rstrip() for body in related) @@ -1453,7 +1452,7 @@ class GMailHandler(MailHandler): """ def __init__(self, account_id, password, recipients, **kw): - super(GMailHandler, self).__init__( + super().__init__( account_id, recipients, secure=True, server_addr=("smtp.gmail.com", 587), credentials=(account_id, password), **kw) @@ -1552,11 +1551,11 @@ def __init__(self, application_name=None, address=None, if isinstance(address, string_types): self._connect_unixsocket() self.enveloper = self.unix_envelope - default_delimiter = u'\x00' + default_delimiter = '\x00' else: self._connect_netsocket() self.enveloper = self.net_envelope - default_delimiter = u'\n' + default_delimiter = '\n' self.record_delimiter = default_delimiter \ if record_delimiter is None else 
record_delimiter @@ -1569,7 +1568,7 @@ def _connect_unixsocket(self): self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) try: self.socket.connect(self.address) - except socket.error: + except OSError: self.socket.close() self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.socket.connect(self.address) @@ -1594,7 +1593,7 @@ def wrap_segments(self, record, before): for segment in segments) def unix_envelope(self, record): - before = u'<{}>{}'.format( + before = '<{}>{}'.format( self.encode_priority(record), self.application_name + ':' if self.application_name else '') return self.wrap_segments(record, before) @@ -1611,7 +1610,7 @@ def net_envelope(self, record): self.application_name = record.channel # RFC 5424: version timestamp hostname app-name procid # msgid structured-data message - before = u'<{}>1 {}Z {} {} {} - - '.format( + before = '<{}>1 {}Z {} {} {} - - '.format( self.encode_priority(record), record.time.isoformat(), socket.gethostname(), @@ -1630,7 +1629,7 @@ def send_to_socket(self, data): if self.unixsocket: try: self.socket.send(data) - except socket.error: + except OSError: self._connect_unixsocket() self.socket.send(data) elif self.socktype == socket.SOCK_DGRAM: diff --git a/logbook/helpers.py b/logbook/helpers.py index 4ea693f..7437ca7 100644 --- a/logbook/helpers.py +++ b/logbook/helpers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook.helpers ~~~~~~~~~~~~~~~ @@ -171,7 +170,7 @@ def rename(src, dst): e = sys.exc_info()[1] if e.errno not in (errno.EEXIST, errno.EACCES): raise - old = "%s-%08x" % (dst, random.randint(0, 2 ** 31 - 1)) + old = f"{dst}-{random.randint(0, 2 ** 31 - 1):08x}" os.rename(dst, old) os.rename(src, dst) try: @@ -266,7 +265,7 @@ def get_application_name(): return os.path.basename(sys.argv[0]).title() -class cached_property(object): +class cached_property: """A property that is lazily calculated and then cached.""" def __init__(self, func, name=None, doc=None): diff --git a/logbook/more.py 
b/logbook/more.py index 50a23be..aa3e484 100644 --- a/logbook/more.py +++ b/logbook/more.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook.more ~~~~~~~~~~~~ @@ -146,9 +145,9 @@ class TaggingHandler(Handler): def __init__(self, handlers, filter=None, bubble=False): Handler.__init__(self, NOTSET, filter, bubble) assert isinstance(handlers, dict) - self._handlers = dict( - (tag, isinstance(handler, Handler) and [handler] or handler) - for (tag, handler) in iteritems(handlers)) + self._handlers = { + tag: isinstance(handler, Handler) and [handler] or handler + for (tag, handler) in iteritems(handlers)} def emit(self, record): for tag in record.extra.get('tags', ()): @@ -256,7 +255,7 @@ def emit(self, record): self.slack.chat.post_message(channel=self.channel, text=self.format(record)) -class JinjaFormatter(object): +class JinjaFormatter: """A formatter object that makes it easy to format using a Jinja 2 template instead of a format string. """ @@ -320,7 +319,7 @@ def emit(self, record): c.wait() -class ColorizingStreamHandlerMixin(object): +class ColorizingStreamHandlerMixin: """A mixin class that does colorizing. .. versionadded:: 0.3 @@ -366,7 +365,7 @@ def get_color(self, record): return 'lightgray' def format(self, record): - rv = super(ColorizingStreamHandlerMixin, self).format(record) + rv = super().format(record) if self.should_colorize(record): color = self.get_color(record) if color: @@ -539,8 +538,8 @@ def __init__(self, elif message_type == "test": self.transport = riemann_client.transport.BlankTransport else: - msg = ("Currently supported message types for RiemannHandler are: {0}. \ - {1} is not supported." + msg = ("Currently supported message types for RiemannHandler are: {}. \ + {} is not supported." 
.format(",".join(["tcp", "udp", "test"]), message_type)) raise RuntimeError(msg) @@ -560,7 +559,7 @@ def record_to_event(self, record): "time": int(time()), "ttl": self.ttl, "host": platform.node(), - "service": "{0}.{1}".format(channel_name, os.getpid()), + "service": f"{channel_name}.{os.getpid()}", "state": state } diff --git a/logbook/notifiers.py b/logbook/notifiers.py index ce9468a..b5c98d1 100644 --- a/logbook/notifiers.py +++ b/logbook/notifiers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook.notifiers ~~~~~~~~~~~~~~~~~ @@ -81,7 +80,7 @@ def __init__(self, application_name=None, icon=None, host=None, if icon is not None: if not os.path.isfile(icon): - raise IOError('Filename to an icon expected.') + raise OSError('Filename to an icon expected.') icon = self._growl.Image.imageFromPath(icon) else: try: @@ -273,7 +272,7 @@ def __init__(self, application_name=None, apikey=None, userkey=None, record_delta=None, level=NOTSET, filter=None, bubble=False, max_title_len=100, max_message_len=512): - super(PushoverHandler, self).__init__(None, record_limit, record_delta, + super().__init__(None, record_limit, record_delta, level, filter, bubble) self.application_name = application_name @@ -296,7 +295,7 @@ def __init__(self, application_name=None, apikey=None, userkey=None, def _crop(self, msg, max_len): if max_len is not None and max_len > 0 and len(msg) > max_len: - return "%s..." % (msg[:max_len-3],) + return f"{msg[:max_len-3]}..." else: return msg diff --git a/logbook/queues.py b/logbook/queues.py index 0b76f0a..abb3040 100644 --- a/logbook/queues.py +++ b/logbook/queues.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook.queues ~~~~~~~~~~~~~~ @@ -241,7 +240,7 @@ def __del__(self): self.close(linger=100) -class ThreadController(object): +class ThreadController: """A helper class used by queue subscribers to control the background thread. 
This is usually created and started in one go by :meth:`~logbook.queues.ZeroMQSubscriber.dispatch_in_background` or @@ -279,7 +278,7 @@ def _target(self): self.setup.pop_thread() -class SubscriberBase(object): +class SubscriberBase: """Baseclass for all subscribers.""" def recv(self, timeout=None): @@ -599,12 +598,12 @@ def recv(self, timeout=None): return LogRecord.from_dict(rv) -class TWHThreadController(object): +class TWHThreadController: """A very basic thread controller that pulls things in from a queue and sends it to a handler. Both queue and handler are taken from the passed :class:`ThreadedWrapperHandler`. """ - class Command(object): + class Command: stop = object() emit = object() emit_batch = object() diff --git a/logbook/ticketing.py b/logbook/ticketing.py index 7321fa3..22cc8e3 100644 --- a/logbook/ticketing.py +++ b/logbook/ticketing.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ logbook.ticketing ~~~~~~~~~~~~~~~~~ @@ -16,7 +15,7 @@ from logbook.helpers import cached_property, b, PY2, u -class Ticket(object): +class Ticket: """Represents a ticket from the database.""" level_name = level_name_property() @@ -71,7 +70,7 @@ def __init__(self, db, row): self.occurrence_id = row['occurrence_id'] -class BackendBase(object): +class BackendBase: """Provides an abstract interface to various databases.""" def __init__(self, **options): diff --git a/logbook/utils.py b/logbook/utils.py index 21df7cc..0f67543 100644 --- a/logbook/utils.py +++ b/logbook/utils.py @@ -7,7 +7,7 @@ from .helpers import string_types -class _SlowContextNotifier(object): +class _SlowContextNotifier: def __init__(self, threshold, func): self.timer = threading.Timer(threshold, func) @@ -93,13 +93,13 @@ def _write_deprecations_if_needed(message, frame_correction): def log_deprecation_message(message, frame_correction=0): - _write_deprecations_if_needed("Deprecation message: {0}".format(message), frame_correction=frame_correction+1) + _write_deprecations_if_needed(f"Deprecation message: 
{message}", frame_correction=frame_correction+1) -class _DeprecatedFunction(object): +class _DeprecatedFunction: def __init__(self, func, message, obj=None, objtype=None): - super(_DeprecatedFunction, self).__init__() + super().__init__() self._func = func self._message = message self._obj = obj @@ -116,9 +116,9 @@ def _get_underlying_func(self): def __call__(self, *args, **kwargs): func = self._get_underlying_func() - warning = "{0} is deprecated.".format(self._get_func_str()) + warning = f"{self._get_func_str()} is deprecated." if self._message is not None: - warning += " {0}".format(self._message) + warning += f" {self._message}" _write_deprecations_if_needed(warning, frame_correction=+1) if self._obj is not None: return func(self._obj, *args, **kwargs) @@ -129,8 +129,8 @@ def __call__(self, *args, **kwargs): def _get_func_str(self): func = self._get_underlying_func() if self._objtype is not None: - return '{0}.{1}'.format(self._objtype.__name__, func.__name__) - return '{0}.{1}'.format(func.__module__, func.__name__) + return f'{self._objtype.__name__}.{func.__name__}' + return f'{func.__module__}.{func.__name__}' def __get__(self, obj, objtype): return self.bound_to(obj, objtype) @@ -149,7 +149,7 @@ def __doc__(self): if returned: # pylint: disable=no-member returned += "\n.. 
deprecated\n" # pylint: disable=no-member if self._message: - returned += " {0}".format( + returned += " {}".format( self._message) # pylint: disable=no-member return returned diff --git a/scripts/make-release.py b/scripts/make-release.py index 23cbdc7..019de2a 100644 --- a/scripts/make-release.py +++ b/scripts/make-release.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- """ make-release ~~~~~~~~~~~~ @@ -24,7 +23,7 @@ def parse_changelog(): with open('CHANGES') as f: lineiter = iter(f) for line in lineiter: - match = re.search('^Version\s+(.*)', line.strip()) + match = re.search(r'^Version\s+(.*)', line.strip()) if match is None: continue version = match.group(1).strip() @@ -79,7 +78,7 @@ def inject_version(match): def set_version(version): info('Setting version to %s', version) with open('logbook/__version__.py', 'w') as f: - f.write('__version__ = {!r}'.format(version)) + f.write(f'__version__ = {version!r}') def fail(message, *args): diff --git a/tests/conftest.py b/tests/conftest.py index 0a12dd5..251e501 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -29,10 +29,10 @@ def test_handler(): return logbook.TestHandler() -class ActivationStrategy(object): +class ActivationStrategy: def __init__(self, handler): - super(ActivationStrategy, self).__init__() + super().__init__() self.handler = handler def activate(self): diff --git a/tests/test_ci.py b/tests/test_ci.py index 5486163..2dd0c58 100644 --- a/tests/test_ci.py +++ b/tests/test_ci.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os import pytest diff --git a/tests/test_deadlock.py b/tests/test_deadlock.py index 66e4ad1..bce0094 100644 --- a/tests/test_deadlock.py +++ b/tests/test_deadlock.py @@ -2,7 +2,7 @@ import logbook -class MyObject(object): +class MyObject: def __init__(self, logger_func): self._logger_func = logger_func @@ -11,7 +11,7 @@ def __str__(self): return "" -class FakeLock(object): +class FakeLock: def __init__(self): self._acquired = False self._deadlock_occurred 
= False diff --git a/tests/test_file_handler.py b/tests/test_file_handler.py index 9585832..fe4dff0 100644 --- a/tests/test_file_handler.py +++ b/tests/test_file_handler.py @@ -120,7 +120,7 @@ def fake_record(message, year, month, day, hour=0, files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith('trot')) - assert files == ['trot-2010-01-0{0}.log'.format(i) + assert files == [f'trot-2010-01-0{i}.log' for i in xrange(5, 9)][-backup_count:] with open(str(tmpdir.join('trot-2010-01-08.log'))) as f: assert f.readline().rstrip() == '[01:00] Last One' @@ -158,7 +158,7 @@ def fake_record(message, year, month, day, hour=0, files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith('trot')) - assert files == ['trot.log.2010-01-0{0}'.format(i) + assert files == [f'trot.log.2010-01-0{i}' for i in xrange(5, 9)][-backup_count:] with open(str(tmpdir.join('trot.log.2010-01-08'))) as f: assert f.readline().rstrip() == '[01:00] Last One' @@ -210,7 +210,7 @@ def fake_record(message, year, month, day, hour=0, computed_files = [x for x in os.listdir(str(tmpdir)) if x.startswith('trot')] expected_files = ['trot.log.2010-01-01'] if preexisting_file else [] - expected_files += ['trot.log.2010-01-0{0}'.format(i) for i in xrange(5, 8)] + expected_files += [f'trot.log.2010-01-0{i}' for i in xrange(5, 8)] expected_files += ['trot.log'] expected_files = expected_files[-backup_count:] diff --git a/tests/test_helpers.py b/tests/test_helpers.py index ec2fd67..23f9d97 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - from logbook.helpers import u from datetime import datetime @@ -9,7 +7,7 @@ def test_jsonhelper(): from logbook.helpers import to_safe_json - class Bogus(object): + class Bogus: def __str__(self): return 'bogus' diff --git a/tests/test_mail_handler.py b/tests/test_mail_handler.py index 9f1fc48..f55e77a 100644 --- a/tests/test_mail_handler.py +++ b/tests/test_mail_handler.py @@ -10,7 +10,7 @@ try: from 
unittest.mock import Mock, call, patch except ImportError: - from mock import Mock, call, patch + from unittest.mock import Mock, call, patch __file_without_pyc__ = __file__ if __file_without_pyc__.endswith('.pyc'): diff --git a/tests/test_more.py b/tests/test_more.py index 0d871d5..ebe8b38 100644 --- a/tests/test_more.py +++ b/tests/test_more.py @@ -32,7 +32,7 @@ def test_colorizing_support(logger): class TestColorizingHandler(ColorizedStderrHandler): def __init__(self, *args, **kwargs): - super(TestColorizingHandler, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._obj_stream = StringIO() @property @@ -116,12 +116,12 @@ def test_external_application_handler(tmpdir, logger): from logbook.more import ExternalApplicationHandler as Handler fn = tmpdir.join('tempfile') handler = Handler([sys.executable, '-c', r'''if 1: - f = open(%(tempfile)s, 'w') + f = open({tempfile}, 'w') try: - f.write('{record.message}\n') + f.write('{{record.message}}\n') finally: f.close() - ''' % {'tempfile': repr(str(fn))}]) + '''.format(tempfile=repr(str(fn)))]) with handler: logger.error('this is a really bad idea') with fn.open() as rf: @@ -161,7 +161,7 @@ def test_dedup_handler(logger): assert 'message repeated 1 times: bar' in test_handler.records[1].message -class TestRiemannHandler(object): +class TestRiemannHandler: @require_module("riemann_client") def test_happy_path(self, logger): diff --git a/tests/test_nteventlog_handler.py b/tests/test_nteventlog_handler.py index 9c27beb..b89e25f 100644 --- a/tests/test_nteventlog_handler.py +++ b/tests/test_nteventlog_handler.py @@ -29,8 +29,7 @@ def test_nteventlog_handler(): def iter_event_log(handle, flags, offset): while True: events = ReadEventLog(handle, flags, offset) - for event in events: - yield event + yield from events if not events: break diff --git a/tests/test_processors.py b/tests/test_processors.py index daaf92a..1e8dd29 100644 --- a/tests/test_processors.py +++ b/tests/test_processors.py @@ -25,7 
+25,7 @@ def test_handler_filter_after_processor(activation_strategy, logger): filter=lambda r, h: 'ip' in r.extra, bubble=False) - class Request(object): + class Request: remote_addr = '127.0.0.1' method = 'GET' path = '/index.html' @@ -72,7 +72,7 @@ def test_handler_processors(activation_strategy, logger): {record.message} ''').lstrip()) - class Request(object): + class Request: remote_addr = '127.0.0.1' method = 'GET' path = '/index.html' diff --git a/tests/test_queues.py b/tests/test_queues.py index 1c6babd..bfde9a4 100644 --- a/tests/test_queues.py +++ b/tests/test_queues.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os import socket import time @@ -59,7 +58,7 @@ def test_missing_zeromq(): ZeroMQSubscriber('tcp://127.0.0.1:42000') -class MultiProcessingHandlerSendBack(object): +class MultiProcessingHandlerSendBack: def __init__(self, queue): self.queue = queue @@ -94,16 +93,16 @@ def test_multi_processing_handler(): class BatchTestHandler(logbook.TestHandler): def __init__(self, *args, **kwargs): - super(BatchTestHandler, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.batches = [] def emit(self, record): - super(BatchTestHandler, self).emit(record) + super().emit(record) self.batches.append([record]) def emit_batch(self, records, reason): for record in records: - super(BatchTestHandler, self).emit(record) + super().emit(record) self.batches.append(records) @@ -120,7 +119,7 @@ def test_threaded_wrapper_handler(logger): assert (not handler.controller.running) assert len(test_handler.records) == 2 assert len(test_handler.batches) == 2 - assert all((len(records) == 1 for records in test_handler.batches)) + assert all(len(records) == 1 for records in test_handler.batches) assert test_handler.has_warning('Just testing') assert test_handler.has_error('More testing') @@ -140,7 +139,7 @@ def test_threaded_wrapper_handler_emit(): assert (not handler.controller.running) assert len(test_handler.records) == 2 assert len(test_handler.batches) 
== 2 - assert all((len(records) == 1 for records in test_handler.batches)) + assert all(len(records) == 1 for records in test_handler.batches) assert test_handler.has_warning('Just testing') assert test_handler.has_error('More testing') @@ -186,7 +185,7 @@ def run_on_remote(channel): gw.exit() -class SubscriberGroupSendBack(object): +class SubscriberGroupSendBack: def __init__(self, message, queue): self.message = message self.queue = queue @@ -226,7 +225,7 @@ def test_redis_handler(): import redis from logbook.queues import RedisHandler - KEY = 'redis-{}'.format(os.getpid()) + KEY = f'redis-{os.getpid()}' FIELDS = ['message', 'host'] r = redis.Redis(REDIS_HOST, REDIS_PORT, decode_responses=True) redis_handler = RedisHandler(key=KEY, level=logbook.INFO, bubble=True) @@ -247,7 +246,7 @@ def test_redis_handler(): assert message.find(LETTERS) # Change the key of the handler and check on redis - KEY = 'test_another_key-{}'.format(os.getpid()) + KEY = f'test_another_key-{os.getpid()}' redis_handler.key = KEY with null_handler.applicationbound(): @@ -296,7 +295,7 @@ def test_redis_handler_lpush(): from logbook.queues import RedisHandler null_handler = logbook.NullHandler() - KEY = 'lpushed-'.format(os.getpid()) + KEY = f'lpushed-{os.getpid()}' redis_handler = RedisHandler(key=KEY, push_method='lpush', level=logbook.INFO, bubble=True) diff --git a/tests/test_syslog_handler.py b/tests/test_syslog_handler.py index 99447ef..7532d0b 100644 --- a/tests/test_syslog_handler.py +++ b/tests/test_syslog_handler.py @@ -38,7 +38,7 @@ def test_syslog_handler(logger, activation_strategy, sock_family, socktype, addr inc.settimeout(1) if UNIX_SOCKET_AVAILABLE and sock_family == socket.AF_UNIX: - expected = (r'^<12>%stestlogger: Syslog is weird%s$' % (app_name + ':' if app_name else '', delimiter)) + expected = (r'^<12>{}testlogger: Syslog is weird{}$'.format(app_name + ':' if app_name else '', delimiter)) else: expected = (r'^<12>1 \d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?Z %s %s %d - - %sSyslog is
weird%s$' % ( socket.gethostname(), @@ -59,7 +59,7 @@ def test_syslog_handler(logger, activation_strategy, sock_family, socktype, addr rv = rv.decode('utf-8') assert re.match(expected, rv), \ - 'expected {}, got {}'.format(expected, rv) + f'expected {expected}, got {rv}' @pytest.fixture diff --git a/tests/test_unicode.py b/tests/test_unicode.py index 96ff00c..5c3a126 100644 --- a/tests/test_unicode.py +++ b/tests/test_unicode.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .utils import require_py3, capturing_stderr_context import logbook @@ -52,12 +51,12 @@ def test_custom_string_format_unicode(logger): @require_py3 def test_unicode_message_encoded_params(logger): with capturing_stderr_context() as stream: - logger.warn("\u2603 {0}", "\u2603".encode('utf8')) + logger.warn("\u2603 {0}", "\u2603".encode()) assert "WARNING: testlogger: \u2603 b'\\xe2\\x98\\x83'" in stream.getvalue() @require_py3 def test_encoded_message_unicode_params(logger): with capturing_stderr_context() as stream: - logger.warn('\u2603 {0}'.encode('utf8'), '\u2603') + logger.warn('\u2603 {0}'.encode(), '\u2603') assert 'WARNING: testlogger: \u2603 \u2603' in stream.getvalue() diff --git a/tests/test_utils.py b/tests/test_utils.py index f4ca5b8..23de89a 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -11,7 +11,7 @@ try: from unittest.mock import Mock, call except ImportError: - from mock import Mock, call + from unittest.mock import Mock, call def test_logged_if_slow_reached(test_handler): @@ -104,7 +104,7 @@ def _no_decorator(func): @pytest.mark.parametrize('decorator', [_no_decorator, classmethod]) def test_class_deprecation(capture, decorator): - class Bla(object): + class Bla: @deprecated('reason') @classmethod @@ -214,7 +214,7 @@ def other_func(): """ assert ".. deprecated" in some_func.__doc__ - assert ".. deprecated\n {0}".format(message) in other_func.__doc__ + assert f".. 
deprecated\n {message}" in other_func.__doc__ @pytest.fixture diff --git a/tests/utils.py b/tests/utils.py index d014cca..ec81f30 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ test utils for logbook ~~~~~~~~~~~~~~~~~~~~~~ @@ -47,7 +46,7 @@ def require_module(module_name): found = False return pytest.mark.skipif( - not found, reason='Module {0} is required'.format(module_name)) + not found, reason=f'Module {module_name} is required') def make_fake_mail_handler(**kwargs): From 9880c3ee9148cfe7efcb390bfe3dc5730fb5caaf Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 21:57:05 +0200 Subject: [PATCH 12/70] pre-commit: isort --- benchmark/bench_disabled_introspection.py | 3 +- benchmark/bench_disabled_logger.py | 1 - benchmark/bench_enabled_introspection.py | 3 +- benchmark/bench_file_handler.py | 2 +- benchmark/bench_file_handler_unicode.py | 2 +- benchmark/bench_logger_level_low.py | 2 +- benchmark/bench_logging_file_handler.py | 3 +- .../bench_logging_file_handler_unicode.py | 3 +- benchmark/bench_logging_logger_creation.py | 1 - benchmark/bench_logging_logger_level_low.py | 4 +- benchmark/bench_logging_noop.py | 4 +- benchmark/bench_logging_noop_filter.py | 4 +- benchmark/bench_logging_stream_handler.py | 2 +- benchmark/bench_noop.py | 2 +- benchmark/bench_noop_filter.py | 2 +- benchmark/bench_noop_filter_on_handler.py | 2 +- benchmark/bench_redirect_from_logging.py | 5 +- benchmark/bench_redirect_to_logging.py | 5 +- benchmark/bench_stack_manipulation.py | 5 +- benchmark/bench_stream_handler.py | 2 +- benchmark/bench_test_handler.py | 1 - benchmark/run.py | 2 +- docs/conf.py | 2 +- logbook/__init__.py | 56 ++++++++++++++---- logbook/_fallback.py | 17 ++++-- logbook/_speedups.pyx | 25 ++++++-- logbook/base.py | 30 +++++++--- logbook/compat.py | 2 +- logbook/concurrency.py | 15 ++--- logbook/handlers.py | 59 ++++++++++++++----- logbook/helpers.py | 7 ++- logbook/more.py | 22 +++---- logbook/notifiers.py 
| 6 +- logbook/queues.py | 11 ++-- logbook/ticketing.py | 11 ++-- logbook/utils.py | 4 +- pyproject.toml | 3 + scripts/make-release.py | 8 +-- setup.py | 1 - tests/conftest.py | 5 +- tests/test_asyncio.py | 4 +- tests/test_ci.py | 1 + tests/test_deadlock.py | 1 + tests/test_file_handler.py | 10 ++-- tests/test_flags.py | 4 +- tests/test_handler_errors.py | 4 +- tests/test_helpers.py | 3 +- tests/test_logbook.py | 4 +- tests/test_logger.py | 3 +- tests/test_logging_api.py | 6 +- tests/test_logging_compat.py | 9 +-- tests/test_logging_times.py | 4 +- tests/test_more.py | 7 ++- tests/test_nteventlog_handler.py | 15 +++-- tests/test_queues.py | 9 ++- tests/test_syslog_handler.py | 3 +- tests/test_ticketing.py | 6 +- tests/test_unicode.py | 4 +- tests/test_utils.py | 13 ++-- tests/utils.py | 4 +- 60 files changed, 296 insertions(+), 162 deletions(-) diff --git a/benchmark/bench_disabled_introspection.py b/benchmark/bench_disabled_introspection.py index d0693b1..2d3450c 100644 --- a/benchmark/bench_disabled_introspection.py +++ b/benchmark/bench_disabled_introspection.py @@ -1,6 +1,5 @@ """Tests with frame introspection disabled""" -from logbook import Logger, NullHandler, Flags - +from logbook import Flags, Logger, NullHandler log = Logger('Test logger') diff --git a/benchmark/bench_disabled_logger.py b/benchmark/bench_disabled_logger.py index e7d50e2..3dd9f3c 100644 --- a/benchmark/bench_disabled_logger.py +++ b/benchmark/bench_disabled_logger.py @@ -1,7 +1,6 @@ """Tests with the whole logger disabled""" from logbook import Logger - log = Logger('Test logger') log.disabled = True diff --git a/benchmark/bench_enabled_introspection.py b/benchmark/bench_enabled_introspection.py index 67b62ec..a30d574 100644 --- a/benchmark/bench_enabled_introspection.py +++ b/benchmark/bench_enabled_introspection.py @@ -1,6 +1,5 @@ """Tests with stack frame introspection enabled""" -from logbook import Logger, NullHandler, Flags - +from logbook import Flags, Logger, NullHandler log = 
Logger('Test logger') diff --git a/benchmark/bench_file_handler.py b/benchmark/bench_file_handler.py index bed578e..dcf1451 100644 --- a/benchmark/bench_file_handler.py +++ b/benchmark/bench_file_handler.py @@ -1,7 +1,7 @@ """Benchmarks the file handler""" -from logbook import Logger, FileHandler from tempfile import NamedTemporaryFile +from logbook import FileHandler, Logger log = Logger('Test logger') diff --git a/benchmark/bench_file_handler_unicode.py b/benchmark/bench_file_handler_unicode.py index 2cee8e1..73031b7 100644 --- a/benchmark/bench_file_handler_unicode.py +++ b/benchmark/bench_file_handler_unicode.py @@ -1,7 +1,7 @@ """Benchmarks the file handler with unicode""" -from logbook import Logger, FileHandler from tempfile import NamedTemporaryFile +from logbook import FileHandler, Logger log = Logger('Test logger') diff --git a/benchmark/bench_logger_level_low.py b/benchmark/bench_logger_level_low.py index a3e1342..9f4e953 100644 --- a/benchmark/bench_logger_level_low.py +++ b/benchmark/bench_logger_level_low.py @@ -1,7 +1,7 @@ """Benchmarks too low logger levels""" -from logbook import Logger, StreamHandler, ERROR from cStringIO import StringIO +from logbook import ERROR, Logger, StreamHandler log = Logger('Test logger') log.level = ERROR diff --git a/benchmark/bench_logging_file_handler.py b/benchmark/bench_logging_file_handler.py index 17b1a7e..e688c59 100644 --- a/benchmark/bench_logging_file_handler.py +++ b/benchmark/bench_logging_file_handler.py @@ -1,8 +1,7 @@ """Tests logging file handler in comparison""" -from logging import getLogger, FileHandler +from logging import FileHandler, getLogger from tempfile import NamedTemporaryFile - log = getLogger('Testlogger') diff --git a/benchmark/bench_logging_file_handler_unicode.py b/benchmark/bench_logging_file_handler_unicode.py index 8dfb18d..8a5a57a 100644 --- a/benchmark/bench_logging_file_handler_unicode.py +++ b/benchmark/bench_logging_file_handler_unicode.py @@ -1,8 +1,7 @@ """Tests logging file 
handler in comparison""" -from logging import getLogger, FileHandler +from logging import FileHandler, getLogger from tempfile import NamedTemporaryFile - log = getLogger('Testlogger') diff --git a/benchmark/bench_logging_logger_creation.py b/benchmark/bench_logging_logger_creation.py index 0d877e0..bbfca9b 100644 --- a/benchmark/bench_logging_logger_creation.py +++ b/benchmark/bench_logging_logger_creation.py @@ -1,7 +1,6 @@ """Test with no handler active""" from logging import getLogger - root_logger = getLogger() diff --git a/benchmark/bench_logging_logger_level_low.py b/benchmark/bench_logging_logger_level_low.py index 4f06596..55c1c8b 100644 --- a/benchmark/bench_logging_logger_level_low.py +++ b/benchmark/bench_logging_logger_level_low.py @@ -1,7 +1,7 @@ """Tests with a logging handler becoming a noop for comparison""" -from logging import getLogger, StreamHandler, ERROR -from cStringIO import StringIO +from logging import ERROR, StreamHandler, getLogger +from cStringIO import StringIO log = getLogger('Testlogger') log.setLevel(ERROR) diff --git a/benchmark/bench_logging_noop.py b/benchmark/bench_logging_noop.py index a3a8099..68d0bee 100644 --- a/benchmark/bench_logging_noop.py +++ b/benchmark/bench_logging_noop.py @@ -1,7 +1,7 @@ """Tests with a logging handler becoming a noop for comparison""" -from logging import getLogger, StreamHandler, ERROR -from cStringIO import StringIO +from logging import ERROR, StreamHandler, getLogger +from cStringIO import StringIO log = getLogger('Testlogger') diff --git a/benchmark/bench_logging_noop_filter.py b/benchmark/bench_logging_noop_filter.py index 6043c48..01a9bf7 100644 --- a/benchmark/bench_logging_noop_filter.py +++ b/benchmark/bench_logging_noop_filter.py @@ -1,7 +1,7 @@ """Tests with a filter disabling a handler for comparsion in logging""" -from logging import getLogger, StreamHandler, Filter -from cStringIO import StringIO +from logging import Filter, StreamHandler, getLogger +from cStringIO import StringIO 
log = getLogger('Testlogger') diff --git a/benchmark/bench_logging_stream_handler.py b/benchmark/bench_logging_stream_handler.py index a21d41f..8833675 100644 --- a/benchmark/bench_logging_stream_handler.py +++ b/benchmark/bench_logging_stream_handler.py @@ -1,7 +1,7 @@ """Tests the stream handler in logging""" from logging import Logger, StreamHandler -from cStringIO import StringIO +from cStringIO import StringIO log = Logger('Test logger') diff --git a/benchmark/bench_noop.py b/benchmark/bench_noop.py index 6503116..f2b7166 100644 --- a/benchmark/bench_noop.py +++ b/benchmark/bench_noop.py @@ -1,7 +1,7 @@ """Test with no handler active""" -from logbook import Logger, StreamHandler, NullHandler, ERROR from cStringIO import StringIO +from logbook import ERROR, Logger, NullHandler, StreamHandler log = Logger('Test logger') diff --git a/benchmark/bench_noop_filter.py b/benchmark/bench_noop_filter.py index 220adcd..5f31659 100644 --- a/benchmark/bench_noop_filter.py +++ b/benchmark/bench_noop_filter.py @@ -1,6 +1,6 @@ -from logbook import Logger, StreamHandler, NullHandler from cStringIO import StringIO +from logbook import Logger, NullHandler, StreamHandler log = Logger('Test logger') diff --git a/benchmark/bench_noop_filter_on_handler.py b/benchmark/bench_noop_filter_on_handler.py index fd9714a..38bb3eb 100644 --- a/benchmark/bench_noop_filter_on_handler.py +++ b/benchmark/bench_noop_filter_on_handler.py @@ -1,7 +1,7 @@ """Like the filter test, but with the should_handle implemented""" -from logbook import Logger, StreamHandler, NullHandler from cStringIO import StringIO +from logbook import Logger, NullHandler, StreamHandler log = Logger('Test logger') diff --git a/benchmark/bench_redirect_from_logging.py b/benchmark/bench_redirect_from_logging.py index 8835925..743a2f3 100644 --- a/benchmark/bench_redirect_from_logging.py +++ b/benchmark/bench_redirect_from_logging.py @@ -1,9 +1,10 @@ """Tests redirects from logging to logbook""" from logging import getLogger 
-from logbook import StreamHandler -from logbook.compat import redirect_logging + from cStringIO import StringIO +from logbook import StreamHandler +from logbook.compat import redirect_logging redirect_logging() log = getLogger('Test logger') diff --git a/benchmark/bench_redirect_to_logging.py b/benchmark/bench_redirect_to_logging.py index 4967855..ebdb592 100644 --- a/benchmark/bench_redirect_to_logging.py +++ b/benchmark/bench_redirect_to_logging.py @@ -1,8 +1,9 @@ """Tests redirects from logging to logbook""" -from logging import getLogger, StreamHandler -from logbook.compat import LoggingHandler +from logging import StreamHandler, getLogger + from cStringIO import StringIO +from logbook.compat import LoggingHandler log = getLogger('Test logger') diff --git a/benchmark/bench_stack_manipulation.py b/benchmark/bench_stack_manipulation.py index 0f546ea..cf2a7db 100644 --- a/benchmark/bench_stack_manipulation.py +++ b/benchmark/bench_stack_manipulation.py @@ -1,9 +1,10 @@ """Tests basic stack manipulation performance""" -from logbook import Handler, NullHandler, StreamHandler, FileHandler, \ - ERROR, WARNING from tempfile import NamedTemporaryFile + from cStringIO import StringIO +from logbook import ERROR, WARNING, FileHandler, Handler, NullHandler, StreamHandler + def run(): f = NamedTemporaryFile() diff --git a/benchmark/bench_stream_handler.py b/benchmark/bench_stream_handler.py index 9449d2e..fab2339 100644 --- a/benchmark/bench_stream_handler.py +++ b/benchmark/bench_stream_handler.py @@ -1,7 +1,7 @@ """Tests the stream handler""" -from logbook import Logger, StreamHandler from cStringIO import StringIO +from logbook import Logger, StreamHandler log = Logger('Test logger') diff --git a/benchmark/bench_test_handler.py b/benchmark/bench_test_handler.py index 68b4de1..d8facfc 100644 --- a/benchmark/bench_test_handler.py +++ b/benchmark/bench_test_handler.py @@ -1,7 +1,6 @@ """Tests the test handler""" from logbook import Logger, TestHandler - log = Logger('Test 
logger') diff --git a/benchmark/run.py b/benchmark/run.py index c114d10..eb61a35 100644 --- a/benchmark/run.py +++ b/benchmark/run.py @@ -2,9 +2,9 @@ """ Runs the benchmarks """ -import sys import os import re +import sys from subprocess import Popen try: diff --git a/docs/conf.py b/docs/conf.py index 8f91a6e..66a13e6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -11,8 +11,8 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the diff --git a/logbook/__init__.py b/logbook/__init__.py index 1a7fdd8..80677ef 100644 --- a/logbook/__init__.py +++ b/logbook/__init__.py @@ -10,19 +10,53 @@ """ import os + +from . import compat from .base import ( - LogRecord, Logger, LoggerGroup, NestedSetup, Processor, Flags, - get_level_name, lookup_level, dispatch_record, CRITICAL, ERROR, WARNING, - NOTICE, INFO, DEBUG, TRACE, NOTSET, set_datetime_format) + CRITICAL, + DEBUG, + ERROR, + INFO, + NOTICE, + NOTSET, + TRACE, + WARNING, + Flags, + Logger, + LoggerGroup, + LogRecord, + NestedSetup, + Processor, + dispatch_record, + get_level_name, + lookup_level, + set_datetime_format, +) from .handlers import ( - Handler, StreamHandler, FileHandler, MonitoringFileHandler, StderrHandler, - RotatingFileHandler, TimedRotatingFileHandler, TestHandler, MailHandler, - GMailHandler, SyslogHandler, NullHandler, NTEventLogHandler, - create_syshandler, StringFormatter, StringFormatterHandlerMixin, - HashingHandlerMixin, LimitingHandlerMixin, WrapperHandler, - FingersCrossedHandler, GroupHandler, GZIPCompressionHandler, BrotliCompressionHandler) -from . 
import compat - + BrotliCompressionHandler, + FileHandler, + FingersCrossedHandler, + GMailHandler, + GroupHandler, + GZIPCompressionHandler, + Handler, + HashingHandlerMixin, + LimitingHandlerMixin, + MailHandler, + MonitoringFileHandler, + NTEventLogHandler, + NullHandler, + RotatingFileHandler, + StderrHandler, + StreamHandler, + StringFormatter, + StringFormatterHandlerMixin, + SyslogHandler, + TestHandler, + TimedRotatingFileHandler, + WrapperHandler, + create_syshandler, +) # create an anonymous default logger and provide all important # methods of that logger as global functions diff --git a/logbook/_fallback.py b/logbook/_fallback.py index 68e4e94..71bb901 100644 --- a/logbook/_fallback.py +++ b/logbook/_fallback.py @@ -8,11 +8,20 @@ :license: BSD, see LICENSE for more details. """ from itertools import count -from logbook.helpers import get_iterator_next_method + from logbook.concurrency import ( - thread_get_ident, greenlet_get_ident, thread_local, greenlet_local, - ThreadLock, GreenletRLock, is_gevent_enabled, ContextVar, context_get_ident, - is_context_enabled) + ContextVar, + GreenletRLock, + ThreadLock, + context_get_ident, + greenlet_get_ident, + greenlet_local, + is_context_enabled, + is_gevent_enabled, + thread_get_ident, + thread_local, +) +from logbook.helpers import get_iterator_next_method _missing = object() _MAX_CONTEXT_OBJECT_CACHE = 256 diff --git a/logbook/_speedups.pyx b/logbook/_speedups.pyx index 6dea7c9..994836a 100644 --- a/logbook/_speedups.pyx +++ b/logbook/_speedups.pyx @@ -9,14 +9,27 @@ """ -from logbook.concurrency import (is_gevent_enabled, thread_get_ident, greenlet_get_ident, thread_local, - GreenletRLock, greenlet_local, ContextVar, context_get_ident, is_context_enabled) +from logbook.concurrency import ( + ContextVar, + GreenletRLock, + context_get_ident, + greenlet_get_ident, + greenlet_local, + is_context_enabled, + is_gevent_enabled, + thread_get_ident, + thread_local, +) from cpython.dict cimport PyDict_Clear, 
PyDict_SetItem -from cpython.list cimport PyList_Append, PyList_Sort, PyList_GET_SIZE - -from cpython.pythread cimport PyThread_type_lock, PyThread_allocate_lock, \ - PyThread_release_lock, PyThread_acquire_lock, WAIT_LOCK +from cpython.list cimport PyList_Append, PyList_GET_SIZE, PyList_Sort +from cpython.pythread cimport ( + WAIT_LOCK, + PyThread_acquire_lock, + PyThread_allocate_lock, + PyThread_release_lock, + PyThread_type_lock, +) _missing = object() diff --git a/logbook/base.py b/logbook/base.py index cff8546..4772717 100644 --- a/logbook/base.py +++ b/logbook/base.py @@ -15,12 +15,18 @@ from itertools import chain from weakref import ref as weakref -from logbook.concurrency import (greenlet_get_ident, thread_get_ident, - thread_get_name) - -from logbook.helpers import (PY2, cached_property, integer_types, iteritems, - parse_iso8601, string_types, to_safe_json, u, - xrange) +from logbook.concurrency import greenlet_get_ident, thread_get_ident, thread_get_name +from logbook.helpers import ( + PY2, + cached_property, + integer_types, + iteritems, + parse_iso8601, + string_types, + to_safe_json, + u, + xrange, +) _has_speedups = False try: @@ -28,12 +34,20 @@ raise ImportError("Speedups disabled via DISABLE_LOGBOOK_CEXT_AT_RUNTIME") from logbook._speedups import ( - _missing, group_reflected_property, ContextStackManager, StackedObject) + ContextStackManager, + StackedObject, + _missing, + group_reflected_property, + ) _has_speedups = True except ImportError: from logbook._fallback import ( - _missing, group_reflected_property, ContextStackManager, StackedObject) + ContextStackManager, + StackedObject, + _missing, + group_reflected_property, + ) _datetime_factory = datetime.utcnow diff --git a/logbook/compat.py b/logbook/compat.py index 60f47d5..874e57d 100644 --- a/logbook/compat.py +++ b/logbook/compat.py @@ -15,7 +15,7 @@ from datetime import date, datetime import logbook -from logbook.helpers import u, string_types, iteritems, collections_abc +from 
logbook.helpers import collections_abc, iteritems, string_types, u _epoch_ord = date(1970, 1, 1).toordinal() diff --git a/logbook/concurrency.py b/logbook/concurrency.py index dd715c8..cd35cc2 100644 --- a/logbook/concurrency.py +++ b/logbook/concurrency.py @@ -38,9 +38,9 @@ def is_gevent_enabled(): thread_get_ident = _get_original('threading', '_get_ident') thread_local = _get_original('threading', 'local') - from gevent.thread import get_ident as greenlet_get_ident from gevent.local import local as greenlet_local from gevent.lock import BoundedSemaphore + from gevent.thread import get_ident as greenlet_get_ident from gevent.threading import __threading__ def thread_get_name(): @@ -128,14 +128,15 @@ def _get_greenlet_lock(self): def _is_owned(self): return self._owner == (thread_get_ident(), greenlet_get_ident()) else: - from threading import ( - Lock as ThreadLock, RLock as ThreadRLock, currentThread) + from threading import Lock as ThreadLock + from threading import RLock as ThreadRLock + from threading import currentThread try: - from thread import ( - get_ident as thread_get_ident, _local as thread_local) + from thread import _local as thread_local + from thread import get_ident as thread_get_ident except ImportError: - from _thread import ( - get_ident as thread_get_ident, _local as thread_local) + from _thread import _local as thread_local + from _thread import get_ident as thread_get_ident def thread_get_name(): return currentThread().getName() diff --git a/logbook/handlers.py b/logbook/handlers.py index 8abebae..17fa842 100644 --- a/logbook/handlers.py +++ b/logbook/handlers.py @@ -7,33 +7,60 @@ :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. 
""" +import errno +import gzip import io +import math import os import re -import sys -import stat -import errno import socket -import gzip -import math +import stat +import sys + try: from hashlib import sha1 except ImportError: from sha import new as sha1 -import traceback + import collections -from datetime import datetime, timedelta +import traceback from collections import deque +from datetime import datetime, timedelta from textwrap import dedent from logbook.base import ( - CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, TRACE, NOTSET, level_name_property, - _missing, lookup_level, Flags, ContextObject, ContextStackManager, - _datetime_factory) -from logbook.helpers import ( - rename, b, _is_text_stream, is_unicode, PY2, zip, xrange, string_types, collections_abc, - integer_types, reraise, u, with_metaclass) + CRITICAL, + DEBUG, + ERROR, + INFO, + NOTICE, + NOTSET, + TRACE, + WARNING, + ContextObject, + ContextStackManager, + Flags, + _datetime_factory, + _missing, + level_name_property, + lookup_level, +) from logbook.concurrency import new_fine_grained_lock +from logbook.helpers import ( + PY2, + _is_text_stream, + b, + collections_abc, + integer_types, + is_unicode, + rename, + reraise, + string_types, + u, + with_metaclass, + xrange, + zip, +) DEFAULT_FORMAT_STRING = u( '[{record.time:%Y-%m-%d %H:%M:%S.%f%z}] ' @@ -1262,8 +1289,8 @@ def message_from_record(self, record, suppressed): (:class:`email.message.Message`). `suppressed` is the number of mails not sent if the `record_limit` feature is active. """ - from email.message import Message from email.header import Header + from email.message import Message msg = Message() msg.set_charset('utf-8') lineiter = iter(self.format(record).splitlines()) @@ -1331,7 +1358,7 @@ def get_connection(self): """Returns an SMTP connection. By default it reconnects for each sent mail. 
""" - from smtplib import SMTP, SMTP_SSL, SMTP_PORT, SMTP_SSL_PORT + from smtplib import SMTP, SMTP_PORT, SMTP_SSL, SMTP_SSL_PORT if self.server_addr is None: host = '127.0.0.1' port = self.secure and SMTP_SSL_PORT or SMTP_PORT @@ -1662,8 +1689,8 @@ def __init__(self, application_name, log_type='Application', 'operating system.') try: - import win32evtlogutil import win32evtlog + import win32evtlogutil except ImportError: raise RuntimeError('The pywin32 library is required ' 'for the NTEventLogHandler.') diff --git a/logbook/helpers.py b/logbook/helpers.py index 7437ca7..d7b7b03 100644 --- a/logbook/helpers.py +++ b/logbook/helpers.py @@ -7,19 +7,20 @@ :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. """ +import errno import os +import random import re import sys -import errno import time -import random from datetime import datetime, timedelta PY2 = sys.version_info[0] == 2 if PY2: - import __builtin__ as _builtins import collections as collections_abc + + import __builtin__ as _builtins else: import builtins as _builtins import collections.abc as collections_abc diff --git a/logbook/more.py b/logbook/more.py index aa3e484..f09097d 100644 --- a/logbook/more.py +++ b/logbook/more.py @@ -7,21 +7,23 @@ :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. 
""" -import re import os import platform - +import re from collections import defaultdict from functools import partial -from logbook.base import ( - RecordDispatcher, dispatch_record, NOTSET, ERROR, NOTICE) -from logbook.handlers import ( - Handler, StringFormatter, StringFormatterHandlerMixin, StderrHandler) from logbook._termcolors import colorize -from logbook.helpers import PY2, string_types, iteritems, u -from logbook.ticketing import TicketingHandler as DatabaseHandler +from logbook.base import ERROR, NOTICE, NOTSET, RecordDispatcher, dispatch_record +from logbook.handlers import ( + Handler, + StderrHandler, + StringFormatter, + StringFormatterHandlerMixin, +) +from logbook.helpers import PY2, iteritems, string_types, u from logbook.ticketing import BackendBase +from logbook.ticketing import TicketingHandler as DatabaseHandler try: import riemann_client.client @@ -33,6 +35,7 @@ if PY2: from urllib import urlencode + from urlparse import parse_qsl else: from urllib.parse import parse_qsl, urlencode @@ -398,8 +401,7 @@ def __init__(self, *args, **kwargs): # backwards compat. Should go away in some future releases -from logbook.handlers import ( - FingersCrossedHandler as FingersCrossedHandlerBase) +from logbook.handlers import FingersCrossedHandler as FingersCrossedHandlerBase class FingersCrossedHandler(FingersCrossedHandlerBase): diff --git a/logbook/notifiers.py b/logbook/notifiers.py index b5c98d1..ee738f3 100644 --- a/logbook/notifiers.py +++ b/logbook/notifiers.py @@ -7,14 +7,14 @@ :copyright: (c) 2010 by Armin Ronacher, Christopher Grebs. :license: BSD, see LICENSE for more details. 
""" +import base64 import os import sys -import base64 from time import time -from logbook.base import NOTSET, ERROR, WARNING +from logbook.base import ERROR, NOTSET, WARNING from logbook.handlers import Handler, LimitingHandlerMixin -from logbook.helpers import get_application_name, PY2, http_client, u +from logbook.helpers import PY2, get_application_name, http_client, u if PY2: from urllib import urlencode diff --git a/logbook/queues.py b/logbook/queues.py index abb3040..2d3a0ae 100644 --- a/logbook/queues.py +++ b/logbook/queues.py @@ -8,17 +8,20 @@ :license: BSD, see LICENSE for more details. """ import json -import threading -from threading import Thread, Lock import platform +import threading +from threading import Lock, Thread + from logbook.base import NOTSET, LogRecord, dispatch_record from logbook.handlers import Handler, WrapperHandler from logbook.helpers import PY2, u if PY2: - from Queue import Empty, Full, Queue as ThreadQueue + from Queue import Empty, Full + from Queue import Queue as ThreadQueue else: - from queue import Empty, Full, Queue as ThreadQueue + from queue import Empty, Full + from queue import Queue as ThreadQueue class RedisHandler(Handler): diff --git a/logbook/ticketing.py b/logbook/ticketing.py index 22cc8e3..0de613e 100644 --- a/logbook/ticketing.py +++ b/logbook/ticketing.py @@ -8,11 +8,12 @@ :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. 
""" -from time import time import json -from logbook.base import NOTSET, level_name_property, LogRecord +from time import time + +from logbook.base import NOTSET, LogRecord, level_name_property from logbook.handlers import Handler, HashingHandlerMixin -from logbook.helpers import cached_property, b, PY2, u +from logbook.helpers import PY2, b, cached_property, u class Ticket: @@ -131,8 +132,8 @@ class SQLAlchemyBackend(BackendBase): """ def setup_backend(self): - from sqlalchemy import create_engine, MetaData - from sqlalchemy.orm import sessionmaker, scoped_session + from sqlalchemy import MetaData, create_engine + from sqlalchemy.orm import scoped_session, sessionmaker engine_or_uri = self.options.pop('uri', None) metadata = self.options.pop('metadata', None) table_prefix = self.options.pop('table_prefix', 'logbook_') diff --git a/logbook/utils.py b/logbook/utils.py index 0f67543..2d485f5 100644 --- a/logbook/utils.py +++ b/logbook/utils.py @@ -1,9 +1,9 @@ -from contextlib import contextmanager import functools import sys import threading +from contextlib import contextmanager -from .base import Logger, DEBUG +from .base import DEBUG, Logger from .helpers import string_types diff --git a/pyproject.toml b/pyproject.toml index 037cd75..866c464 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,3 +42,6 @@ all = ["Logbook[execnet,sqlalchemy,redis,zmq,jinja,compression]"] [tool.pytest.ini_options] testpaths = ["tests"] + +[tool.isort] +profile = "black" diff --git a/scripts/make-release.py b/scripts/make-release.py index 019de2a..d99c19e 100644 --- a/scripts/make-release.py +++ b/scripts/make-release.py @@ -9,12 +9,12 @@ :copyright: (c) 2011 by Armin Ronacher. :license: BSD, see LICENSE for more details. 
""" -import sys +import argparse import os import re -import argparse -from datetime import datetime, date -from subprocess import Popen, PIPE +import sys +from datetime import date, datetime +from subprocess import PIPE, Popen _date_clean_re = re.compile(r'(\d+)(st|nd|rd|th)') diff --git a/setup.py b/setup.py index 6355c98..9feec42 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,6 @@ from setuptools import Extension, setup - IS_CPYTHON = platform.python_implementation() == "CPython" DISABLE_EXTENSION = bool(os.environ.get("DISABLE_LOGBOOK_CEXT")) diff --git a/tests/conftest.py b/tests/conftest.py index 251e501..0ddd5b9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,8 +1,9 @@ import sys -import logbook import pytest +import logbook + logbook.StderrHandler().push_application() @@ -102,7 +103,7 @@ def gevent(request): module_name = getattr(request.module, '__name__', '') if (not any(s in module_name for s in ('queues', 'processors')) and request.param): - from logbook.concurrency import enable_gevent, _disable_gevent + from logbook.concurrency import _disable_gevent, enable_gevent enable_gevent() @request.addfinalizer diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 469554b..1297b63 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -1,6 +1,8 @@ +import asyncio + import pytest + import logbook -import asyncio from logbook.concurrency import has_contextvars ITERATIONS = 100 diff --git a/tests/test_ci.py b/tests/test_ci.py index 2dd0c58..3fa34e6 100644 --- a/tests/test_ci.py +++ b/tests/test_ci.py @@ -4,6 +4,7 @@ from .utils import appveyor, travis + @appveyor def test_appveyor_speedups(): if os.environ.get('CYBUILD'): diff --git a/tests/test_deadlock.py b/tests/test_deadlock.py index bce0094..f0cf3c7 100644 --- a/tests/test_deadlock.py +++ b/tests/test_deadlock.py @@ -1,4 +1,5 @@ import sys + import logbook diff --git a/tests/test_file_handler.py b/tests/test_file_handler.py index fe4dff0..092a979 100644 --- 
a/tests/test_file_handler.py +++ b/tests/test_file_handler.py @@ -1,13 +1,15 @@ +import gzip import os -import pytest import time from datetime import datetime +import brotli +import pytest + import logbook from logbook.helpers import u, xrange -import gzip -import brotli -from .utils import capturing_stderr_context, LETTERS + +from .utils import LETTERS, capturing_stderr_context def test_file_handler(logfile, activation_strategy, logger): diff --git a/tests/test_flags.py b/tests/test_flags.py index eb0bf59..74a3215 100644 --- a/tests/test_flags.py +++ b/tests/test_flags.py @@ -1,7 +1,7 @@ -import logbook - import pytest +import logbook + from .utils import capturing_stderr_context diff --git a/tests/test_handler_errors.py b/tests/test_handler_errors.py index 8095083..6548e4b 100644 --- a/tests/test_handler_errors.py +++ b/tests/test_handler_errors.py @@ -1,10 +1,10 @@ import re import sys -import logbook - import pytest +import logbook + from .utils import capturing_stderr_context __file_without_pyc__ = __file__ diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 23f9d97..eedbcf4 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -1,8 +1,9 @@ -from logbook.helpers import u from datetime import datetime import pytest +from logbook.helpers import u + def test_jsonhelper(): from logbook.helpers import to_safe_json diff --git a/tests/test_logbook.py b/tests/test_logbook.py index 0408315..8ea93f7 100644 --- a/tests/test_logbook.py +++ b/tests/test_logbook.py @@ -1,7 +1,7 @@ -import logbook - import pytest +import logbook + def test_global_functions(activation_strategy): with activation_strategy(logbook.TestHandler()) as handler: diff --git a/tests/test_logger.py b/tests/test_logger.py index 9ac5ab8..908d653 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -1,6 +1,7 @@ -import logbook import pytest +import logbook + def test_level_properties(logger): assert logger.level == logbook.NOTSET diff --git a/tests/test_logging_api.py 
b/tests/test_logging_api.py index a9b2c64..e2dd3ec 100644 --- a/tests/test_logging_api.py +++ b/tests/test_logging_api.py @@ -1,11 +1,11 @@ import pickle import sys -import logbook -from logbook.helpers import iteritems, xrange, u - import pytest +import logbook +from logbook.helpers import iteritems, u, xrange + def test_basic_logging(active_handler, logger): logger.warn('This is a warning. Nice hah?') diff --git a/tests/test_logging_compat.py b/tests/test_logging_compat.py index 7964993..37e5640 100644 --- a/tests/test_logging_compat.py +++ b/tests/test_logging_compat.py @@ -1,15 +1,14 @@ import functools from random import randrange +import pytest + import logbook import logbook.compat from logbook.helpers import StringIO -import pytest - from .utils import capturing_stderr_context - __file_without_pyc__ = __file__ if __file_without_pyc__.endswith(".pyc"): __file_without_pyc__ = __file_without_pyc__[:-1] @@ -18,6 +17,7 @@ @pytest.mark.parametrize('set_root_logger_level', [True, False]) def test_basic_compat(request, set_root_logger_level): import logging + from logbook.compat import redirected_logging # mimic the default logging setting @@ -68,6 +68,7 @@ def test_redirect_logbook(): from itertools import count + test_warning_redirections_i = count() @@ -77,7 +78,7 @@ def test_warning_redirections(): redirector = redirected_warnings() redirector.start() try: - from warnings import warn, resetwarnings + from warnings import resetwarnings, warn resetwarnings() warn(RuntimeWarning('Testing' + str(next(test_warning_redirections_i)))) finally: diff --git a/tests/test_logging_times.py b/tests/test_logging_times.py index c87c4e8..844a488 100644 --- a/tests/test_logging_times.py +++ b/tests/test_logging_times.py @@ -1,9 +1,9 @@ from datetime import datetime, timedelta, tzinfo -import logbook - import pytest +import logbook + from .utils import get_total_delta_seconds diff --git a/tests/test_more.py b/tests/test_more.py index ebe8b38..d58758b 100644 --- 
a/tests/test_more.py +++ b/tests/test_more.py @@ -1,10 +1,10 @@ import sys +import pytest + import logbook from logbook.helpers import StringIO -import pytest - from .utils import capturing_stderr_context, missing, require_module @@ -22,6 +22,7 @@ def test_jinja_formatter(logger): @missing('jinja2') def test_missing_jinja2(): from logbook.more import JinjaFormatter + # check the RuntimeError is raised with pytest.raises(RuntimeError): JinjaFormatter('dummy') @@ -61,7 +62,7 @@ def stream(self): def test_tagged(default_handler): - from logbook.more import TaggingLogger, TaggingHandler + from logbook.more import TaggingHandler, TaggingLogger stream = StringIO() second_handler = logbook.StreamHandler(stream) diff --git a/tests/test_nteventlog_handler.py b/tests/test_nteventlog_handler.py index b89e25f..9d10cc3 100644 --- a/tests/test_nteventlog_handler.py +++ b/tests/test_nteventlog_handler.py @@ -1,8 +1,9 @@ import os -import logbook import pytest +import logbook + from .utils import require_module @@ -13,10 +14,16 @@ reason="Don't clutter NT Event Log unless enabled.") def test_nteventlog_handler(): from win32con import ( - EVENTLOG_ERROR_TYPE, EVENTLOG_INFORMATION_TYPE, EVENTLOG_WARNING_TYPE) + EVENTLOG_ERROR_TYPE, + EVENTLOG_INFORMATION_TYPE, + EVENTLOG_WARNING_TYPE, + ) from win32evtlog import ( - EVENTLOG_BACKWARDS_READ, EVENTLOG_SEQUENTIAL_READ, OpenEventLog, - ReadEventLog) + EVENTLOG_BACKWARDS_READ, + EVENTLOG_SEQUENTIAL_READ, + OpenEventLog, + ReadEventLog, + ) from win32evtlogutil import SafeFormatMessage logger = logbook.Logger('Test Logger') diff --git a/tests/test_queues.py b/tests/test_queues.py index bfde9a4..1395ba9 100644 --- a/tests/test_queues.py +++ b/tests/test_queues.py @@ -2,12 +2,12 @@ import socket import time -from .utils import require_module, missing, LETTERS +import pytest import logbook from logbook.helpers import u -import pytest +from .utils import LETTERS, missing, require_module REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost') 
REDIS_PORT = int(os.environ.get('REDIS_PORT', '6379')) @@ -77,6 +77,7 @@ def test_multi_processing_handler(): if os.getenv('APPVEYOR') == 'True': pytest.skip('Test hangs on AppVeyor CI') from multiprocessing import Process, Queue + from logbook.queues import MultiProcessingSubscriber queue = Queue(-1) test_handler = logbook.TestHandler() @@ -201,6 +202,7 @@ def test_subscriber_group(): if os.getenv('APPVEYOR') == 'True': pytest.skip('Test hangs on AppVeyor CI') from multiprocessing import Process, Queue + from logbook.queues import MultiProcessingSubscriber, SubscriberGroup a_queue = Queue(-1) b_queue = Queue(-1) @@ -223,6 +225,7 @@ def test_subscriber_group(): @require_module('redis') def test_redis_handler(): import redis + from logbook.queues import RedisHandler KEY = f'redis-{os.getpid()}' @@ -292,6 +295,7 @@ def test_redis_handler_lpush(): new items should be first on list """ import redis + from logbook.queues import RedisHandler null_handler = logbook.NullHandler() @@ -320,6 +324,7 @@ def test_redis_handler_rpush(): old items should be first on list """ import redis + from logbook.queues import RedisHandler null_handler = logbook.NullHandler() diff --git a/tests/test_syslog_handler.py b/tests/test_syslog_handler.py index 7532d0b..eb5ef4b 100644 --- a/tests/test_syslog_handler.py +++ b/tests/test_syslog_handler.py @@ -3,9 +3,10 @@ import socket from contextlib import closing -import logbook import pytest +import logbook + UNIX_SOCKET = "/tmp/__unixsock_logbook.test" DELIMITERS = { diff --git a/tests/test_ticketing.py b/tests/test_ticketing.py index b203cb6..209065f 100644 --- a/tests/test_ticketing.py +++ b/tests/test_ticketing.py @@ -6,8 +6,9 @@ except ImportError: from _thread import get_ident -import logbook import pytest + +import logbook from logbook.helpers import xrange from .utils import require_module @@ -24,8 +25,9 @@ reason='Problem with in-memory sqlite on Python 3.2, 3.3 and Windows') @require_module('sqlalchemy') def 
test_basic_ticketing(logger): - from logbook.ticketing import TicketingHandler from time import sleep + + from logbook.ticketing import TicketingHandler with TicketingHandler('sqlite:///') as handler: for x in xrange(5): logger.warn('A warning') diff --git a/tests/test_unicode.py b/tests/test_unicode.py index 5c3a126..c716e3a 100644 --- a/tests/test_unicode.py +++ b/tests/test_unicode.py @@ -1,7 +1,7 @@ -from .utils import require_py3, capturing_stderr_context - import logbook +from .utils import capturing_stderr_context, require_py3 + @require_py3 def test_default_format_unicode(logger): diff --git a/tests/test_utils.py b/tests/test_utils.py index 23de89a..9a4128f 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,10 +1,15 @@ +from time import sleep + import pytest -import logbook +import logbook from logbook.utils import ( - logged_if_slow, deprecated, forget_deprecation_locations, - suppressed_deprecations, log_deprecation_message) -from time import sleep + deprecated, + forget_deprecation_locations, + log_deprecation_message, + logged_if_slow, + suppressed_deprecations, +) _THRESHOLD = 0.1 diff --git a/tests/utils.py b/tests/utils.py index ec81f30..b12ceb1 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -10,11 +10,11 @@ import sys from contextlib import contextmanager +import pytest + import logbook from logbook.helpers import StringIO -import pytest - _missing = object() LETTERS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" From 3f1889e7959b34bae52b56afc309407080114214 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 21:57:32 +0200 Subject: [PATCH 13/70] pre-commit: black --- benchmark/bench_disabled_introspection.py | 4 +- benchmark/bench_disabled_logger.py | 4 +- benchmark/bench_enabled_introspection.py | 4 +- benchmark/bench_file_handler.py | 4 +- benchmark/bench_file_handler_unicode.py | 4 +- benchmark/bench_logger_creation.py | 2 +- benchmark/bench_logger_level_low.py | 4 +- 
benchmark/bench_logging_file_handler.py | 4 +- .../bench_logging_file_handler_unicode.py | 4 +- benchmark/bench_logging_logger_creation.py | 4 +- benchmark/bench_logging_logger_level_low.py | 4 +- benchmark/bench_logging_noop.py | 4 +- benchmark/bench_logging_noop_filter.py | 4 +- benchmark/bench_logging_stream_handler.py | 6 +- benchmark/bench_noop.py | 4 +- benchmark/bench_noop_filter.py | 4 +- benchmark/bench_noop_filter_on_handler.py | 4 +- benchmark/bench_redirect_from_logging.py | 6 +- benchmark/bench_redirect_to_logging.py | 6 +- benchmark/bench_stream_handler.py | 6 +- benchmark/bench_test_handler.py | 4 +- benchmark/run.py | 42 +- docs/conf.py | 44 +- logbook/__init__.py | 4 +- logbook/_fallback.py | 31 +- logbook/_termcolors.py | 31 +- logbook/base.py | 230 +++--- logbook/compat.py | 85 ++- logbook/concurrency.py | 28 +- logbook/handlers.py | 718 +++++++++++------- logbook/helpers.py | 86 ++- logbook/more.py | 254 ++++--- logbook/notifiers.py | 245 ++++-- logbook/queues.py | 105 ++- logbook/ticketing.py | 327 ++++---- logbook/utils.py | 41 +- scripts/make-release.py | 85 ++- setup.py | 1 + tests/conftest.py | 25 +- tests/test_asyncio.py | 8 +- tests/test_ci.py | 5 +- tests/test_deadlock.py | 3 +- tests/test_file_handler.py | 217 +++--- tests/test_fingers_crossed_handler.py | 58 +- tests/test_flags.py | 25 +- tests/test_groups.py | 61 +- tests/test_handler_errors.py | 39 +- tests/test_handlers.py | 99 +-- tests/test_helpers.py | 43 +- tests/test_log_record.py | 20 +- tests/test_logbook.py | 36 +- tests/test_logger.py | 12 +- tests/test_logging_api.py | 32 +- tests/test_logging_compat.py | 48 +- tests/test_logging_times.py | 43 +- tests/test_mail_handler.py | 216 +++--- tests/test_more.py | 143 ++-- tests/test_nteventlog_handler.py | 40 +- tests/test_null_handler.py | 26 +- tests/test_processors.py | 54 +- tests/test_queues.py | 166 ++-- tests/test_syslog_handler.py | 44 +- tests/test_test_handler.py | 58 +- tests/test_ticketing.py | 23 +- 
tests/test_unicode.py | 26 +- tests/test_utils.py | 73 +- tests/utils.py | 22 +- 67 files changed, 2364 insertions(+), 1748 deletions(-) diff --git a/benchmark/bench_disabled_introspection.py b/benchmark/bench_disabled_introspection.py index 2d3450c..4585cef 100644 --- a/benchmark/bench_disabled_introspection.py +++ b/benchmark/bench_disabled_introspection.py @@ -1,7 +1,7 @@ """Tests with frame introspection disabled""" from logbook import Flags, Logger, NullHandler -log = Logger('Test logger') +log = Logger("Test logger") class DummyHandler(NullHandler): @@ -12,4 +12,4 @@ def run(): with Flags(introspection=False): with DummyHandler() as handler: for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") diff --git a/benchmark/bench_disabled_logger.py b/benchmark/bench_disabled_logger.py index 3dd9f3c..8644ca2 100644 --- a/benchmark/bench_disabled_logger.py +++ b/benchmark/bench_disabled_logger.py @@ -1,10 +1,10 @@ """Tests with the whole logger disabled""" from logbook import Logger -log = Logger('Test logger') +log = Logger("Test logger") log.disabled = True def run(): for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") diff --git a/benchmark/bench_enabled_introspection.py b/benchmark/bench_enabled_introspection.py index a30d574..3147e80 100644 --- a/benchmark/bench_enabled_introspection.py +++ b/benchmark/bench_enabled_introspection.py @@ -1,7 +1,7 @@ """Tests with stack frame introspection enabled""" from logbook import Flags, Logger, NullHandler -log = Logger('Test logger') +log = Logger("Test logger") class DummyHandler(NullHandler): @@ -12,4 +12,4 @@ def run(): with Flags(introspection=True): with DummyHandler() as handler: for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") diff --git a/benchmark/bench_file_handler.py b/benchmark/bench_file_handler.py index dcf1451..ec19095 100644 --- a/benchmark/bench_file_handler.py +++ 
b/benchmark/bench_file_handler.py @@ -3,11 +3,11 @@ from logbook import FileHandler, Logger -log = Logger('Test logger') +log = Logger("Test logger") def run(): f = NamedTemporaryFile() with FileHandler(f.name) as handler: for x in xrange(500): - log.warning('this is handled') + log.warning("this is handled") diff --git a/benchmark/bench_file_handler_unicode.py b/benchmark/bench_file_handler_unicode.py index 73031b7..573116c 100644 --- a/benchmark/bench_file_handler_unicode.py +++ b/benchmark/bench_file_handler_unicode.py @@ -3,11 +3,11 @@ from logbook import FileHandler, Logger -log = Logger('Test logger') +log = Logger("Test logger") def run(): f = NamedTemporaryFile() with FileHandler(f.name) as handler: for x in xrange(500): - log.warning('this is handled \x6f') + log.warning("this is handled \x6f") diff --git a/benchmark/bench_logger_creation.py b/benchmark/bench_logger_creation.py index d37cacd..9a2a1ad 100644 --- a/benchmark/bench_logger_creation.py +++ b/benchmark/bench_logger_creation.py @@ -4,4 +4,4 @@ def run(): for x in xrange(500): - Logger('Test') + Logger("Test") diff --git a/benchmark/bench_logger_level_low.py b/benchmark/bench_logger_level_low.py index 9f4e953..009f445 100644 --- a/benchmark/bench_logger_level_low.py +++ b/benchmark/bench_logger_level_low.py @@ -3,7 +3,7 @@ from logbook import ERROR, Logger, StreamHandler -log = Logger('Test logger') +log = Logger("Test logger") log.level = ERROR @@ -11,4 +11,4 @@ def run(): out = StringIO() with StreamHandler(out): for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") diff --git a/benchmark/bench_logging_file_handler.py b/benchmark/bench_logging_file_handler.py index e688c59..c18d607 100644 --- a/benchmark/bench_logging_file_handler.py +++ b/benchmark/bench_logging_file_handler.py @@ -2,7 +2,7 @@ from logging import FileHandler, getLogger from tempfile import NamedTemporaryFile -log = getLogger('Testlogger') +log = getLogger("Testlogger") def run(): @@ 
-10,4 +10,4 @@ def run(): handler = FileHandler(f.name) log.addHandler(handler) for x in xrange(500): - log.warning('this is handled') + log.warning("this is handled") diff --git a/benchmark/bench_logging_file_handler_unicode.py b/benchmark/bench_logging_file_handler_unicode.py index 8a5a57a..de82c95 100644 --- a/benchmark/bench_logging_file_handler_unicode.py +++ b/benchmark/bench_logging_file_handler_unicode.py @@ -2,7 +2,7 @@ from logging import FileHandler, getLogger from tempfile import NamedTemporaryFile -log = getLogger('Testlogger') +log = getLogger("Testlogger") def run(): @@ -10,4 +10,4 @@ def run(): handler = FileHandler(f.name) log.addHandler(handler) for x in xrange(500): - log.warning('this is handled \x6f') + log.warning("this is handled \x6f") diff --git a/benchmark/bench_logging_logger_creation.py b/benchmark/bench_logging_logger_creation.py index bbfca9b..246ecc4 100644 --- a/benchmark/bench_logging_logger_creation.py +++ b/benchmark/bench_logging_logger_creation.py @@ -6,5 +6,5 @@ def run(): for x in xrange(500): - getLogger('Test') - del root_logger.manager.loggerDict['Test'] + getLogger("Test") + del root_logger.manager.loggerDict["Test"] diff --git a/benchmark/bench_logging_logger_level_low.py b/benchmark/bench_logging_logger_level_low.py index 55c1c8b..35286c5 100644 --- a/benchmark/bench_logging_logger_level_low.py +++ b/benchmark/bench_logging_logger_level_low.py @@ -3,7 +3,7 @@ from cStringIO import StringIO -log = getLogger('Testlogger') +log = getLogger("Testlogger") log.setLevel(ERROR) @@ -12,4 +12,4 @@ def run(): handler = StreamHandler(out) log.addHandler(handler) for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") diff --git a/benchmark/bench_logging_noop.py b/benchmark/bench_logging_noop.py index 68d0bee..64cf527 100644 --- a/benchmark/bench_logging_noop.py +++ b/benchmark/bench_logging_noop.py @@ -3,7 +3,7 @@ from cStringIO import StringIO -log = getLogger('Testlogger') +log = 
getLogger("Testlogger") def run(): @@ -12,4 +12,4 @@ def run(): handler.setLevel(ERROR) log.addHandler(handler) for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") diff --git a/benchmark/bench_logging_noop_filter.py b/benchmark/bench_logging_noop_filter.py index 01a9bf7..24ce4ba 100644 --- a/benchmark/bench_logging_noop_filter.py +++ b/benchmark/bench_logging_noop_filter.py @@ -3,7 +3,7 @@ from cStringIO import StringIO -log = getLogger('Testlogger') +log = getLogger("Testlogger") class DisableFilter(Filter): @@ -17,4 +17,4 @@ def run(): handler.addFilter(DisableFilter()) log.addHandler(handler) for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") diff --git a/benchmark/bench_logging_stream_handler.py b/benchmark/bench_logging_stream_handler.py index 8833675..bacaaa8 100644 --- a/benchmark/bench_logging_stream_handler.py +++ b/benchmark/bench_logging_stream_handler.py @@ -3,12 +3,12 @@ from cStringIO import StringIO -log = Logger('Test logger') +log = Logger("Test logger") def run(): out = StringIO() log.addHandler(StreamHandler(out)) for x in xrange(500): - log.warning('this is not handled') - assert out.getvalue().count('\n') == 500 + log.warning("this is not handled") + assert out.getvalue().count("\n") == 500 diff --git a/benchmark/bench_noop.py b/benchmark/bench_noop.py index f2b7166..6db4955 100644 --- a/benchmark/bench_noop.py +++ b/benchmark/bench_noop.py @@ -3,7 +3,7 @@ from logbook import ERROR, Logger, NullHandler, StreamHandler -log = Logger('Test logger') +log = Logger("Test logger") def run(): @@ -11,5 +11,5 @@ def run(): with NullHandler(): with StreamHandler(out, level=ERROR) as handler: for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") assert not out.getvalue() diff --git a/benchmark/bench_noop_filter.py b/benchmark/bench_noop_filter.py index 5f31659..99005dc 100644 --- a/benchmark/bench_noop_filter.py +++ 
b/benchmark/bench_noop_filter.py @@ -2,7 +2,7 @@ from logbook import Logger, NullHandler, StreamHandler -log = Logger('Test logger') +log = Logger("Test logger") def run(): @@ -10,5 +10,5 @@ def run(): with NullHandler(): with StreamHandler(out, filter=lambda r, h: False) as handler: for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") assert not out.getvalue() diff --git a/benchmark/bench_noop_filter_on_handler.py b/benchmark/bench_noop_filter_on_handler.py index 38bb3eb..becfbaa 100644 --- a/benchmark/bench_noop_filter_on_handler.py +++ b/benchmark/bench_noop_filter_on_handler.py @@ -3,7 +3,7 @@ from logbook import Logger, NullHandler, StreamHandler -log = Logger('Test logger') +log = Logger("Test logger") class CustomStreamHandler(StreamHandler): @@ -16,5 +16,5 @@ def run(): with NullHandler(): with CustomStreamHandler(out) as handler: for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") assert not out.getvalue() diff --git a/benchmark/bench_redirect_from_logging.py b/benchmark/bench_redirect_from_logging.py index 743a2f3..2957f7a 100644 --- a/benchmark/bench_redirect_from_logging.py +++ b/benchmark/bench_redirect_from_logging.py @@ -7,12 +7,12 @@ from logbook.compat import redirect_logging redirect_logging() -log = getLogger('Test logger') +log = getLogger("Test logger") def run(): out = StringIO() with StreamHandler(out): for x in xrange(500): - log.warning('this is not handled') - assert out.getvalue().count('\n') == 500 + log.warning("this is not handled") + assert out.getvalue().count("\n") == 500 diff --git a/benchmark/bench_redirect_to_logging.py b/benchmark/bench_redirect_to_logging.py index ebdb592..04d5852 100644 --- a/benchmark/bench_redirect_to_logging.py +++ b/benchmark/bench_redirect_to_logging.py @@ -5,7 +5,7 @@ from logbook.compat import LoggingHandler -log = getLogger('Test logger') +log = getLogger("Test logger") def run(): @@ -13,5 +13,5 @@ def run(): 
log.addHandler(StreamHandler(out)) with LoggingHandler(): for x in xrange(500): - log.warning('this is not handled') - assert out.getvalue().count('\n') == 500 + log.warning("this is not handled") + assert out.getvalue().count("\n") == 500 diff --git a/benchmark/bench_stream_handler.py b/benchmark/bench_stream_handler.py index fab2339..a033612 100644 --- a/benchmark/bench_stream_handler.py +++ b/benchmark/bench_stream_handler.py @@ -3,12 +3,12 @@ from logbook import Logger, StreamHandler -log = Logger('Test logger') +log = Logger("Test logger") def run(): out = StringIO() with StreamHandler(out) as handler: for x in xrange(500): - log.warning('this is not handled') - assert out.getvalue().count('\n') == 500 + log.warning("this is not handled") + assert out.getvalue().count("\n") == 500 diff --git a/benchmark/bench_test_handler.py b/benchmark/bench_test_handler.py index d8facfc..bd4d8e8 100644 --- a/benchmark/bench_test_handler.py +++ b/benchmark/bench_test_handler.py @@ -1,10 +1,10 @@ """Tests the test handler""" from logbook import Logger, TestHandler -log = Logger('Test logger') +log = Logger("Test logger") def run(): with TestHandler() as handler: for x in xrange(500): - log.warning('this is not handled') + log.warning("this is not handled") diff --git a/benchmark/run.py b/benchmark/run.py index eb61a35..3544dba 100644 --- a/benchmark/run.py +++ b/benchmark/run.py @@ -9,12 +9,13 @@ try: from pkg_resources import get_distribution - version = get_distribution('Logbook').version + + version = get_distribution("Logbook").version except Exception: - version = 'unknown version' + version = "unknown version" -_filename_re = re.compile(r'^bench_(.*?)\.py$') +_filename_re = re.compile(r"^bench_(.*?)\.py$") bench_directory = os.path.abspath(os.path.dirname(__file__)) @@ -24,38 +25,49 @@ def list_benchmarks(): match = _filename_re.match(name) if match is not None: result.append(match.group(1)) - result.sort(key=lambda x: (x.startswith('logging_'), x.lower())) + 
result.sort(key=lambda x: (x.startswith("logging_"), x.lower())) return result def run_bench(name, use_gevent=False): - sys.stdout.write('%-32s' % name) + sys.stdout.write("%-32s" % name) sys.stdout.flush() - Popen([sys.executable, '-mtimeit', '-s', - 'from bench_%s import run' % name, - 'from logbook.concurrency import enable_gevent', - 'enable_gevent()' if use_gevent else '', - 'run()']).wait() + Popen( + [ + sys.executable, + "-mtimeit", + "-s", + "from bench_%s import run" % name, + "from logbook.concurrency import enable_gevent", + "enable_gevent()" if use_gevent else "", + "run()", + ] + ).wait() def bench_wrapper(use_gevent=False): - print('=' * 80) - print(f'Running benchmark with Logbook {version} (gevent enabled={use_gevent})') - print('-' * 80) + print("=" * 80) + print( + "Running benchmark with Logbook {} (gevent enabled={})".format( + version, use_gevent + ) + ) + print("-" * 80) os.chdir(bench_directory) for bench in list_benchmarks(): run_bench(bench, use_gevent) - print('-' * 80) + print("-" * 80) def main(): bench_wrapper(False) try: import gevent + bench_wrapper(True) except ImportError: pass -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/docs/conf.py b/docs/conf.py index 66a13e6..8a1084a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -17,7 +17,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.extend((os.path.abspath('.'), os.path.abspath('..'))) +sys.path.extend((os.path.abspath("."), os.path.abspath(".."))) # -- General configuration ---------------------------------------------------- @@ -26,30 +26,32 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'Logbook' -copyright = '2010, Armin Ronacher, Georg Brandl' +project = "Logbook" +copyright = "2010, Armin Ronacher, Georg Brandl" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -with open(os.path.join(os.path.dirname(__file__), "..", "logbook", "__version__.py")) as version_file: +with open( + os.path.join(os.path.dirname(__file__), "..", "logbook", "__version__.py") +) as version_file: # can't use import here... version = release = version_file.read().strip().split("=")[1].strip()[1:-1] @@ -65,7 +67,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -83,7 +85,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -93,17 +95,17 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'sheet' +html_theme = "sheet" # Theme options are theme-specific and customize the look and feel of a theme # further. 
For a list of options available for each theme, see the # documentation. html_theme_options = { - 'nosidebar': True, + "nosidebar": True, } # Add any paths that contain custom themes here, relative to this directory. -html_theme_path = ['.'] +html_theme_path = ["."] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". @@ -124,7 +126,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. @@ -170,7 +172,7 @@ # html_file_suffix = '' # Output file base name for HTML help builder. -htmlhelp_basename = 'Logbookdoc' +htmlhelp_basename = "Logbookdoc" # -- Options for LaTeX output ------------------------------------------------- @@ -185,8 +187,13 @@ # (source start file, target name, title, author, # documentclass [howto/manual]). latex_documents = [ - ('index', 'Logbook.tex', 'Logbook Documentation', - 'Armin Ronacher, Georg Brandl', 'manual'), + ( + "index", + "Logbook.tex", + "Logbook Documentation", + "Armin Ronacher, Georg Brandl", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of @@ -218,8 +225,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - ('index', 'logbook', 'Logbook Documentation', - ['Armin Ronacher, Georg Brandl'], 1) + ("index", "logbook", "Logbook Documentation", ["Armin Ronacher, Georg Brandl"], 1) ] intersphinx_mapping = { diff --git a/logbook/__init__.py b/logbook/__init__.py index 80677ef..1cc8b56 100644 --- a/logbook/__init__.py +++ b/logbook/__init__.py @@ -60,7 +60,7 @@ # create an anonymous default logger and provide all important # methods of that logger as global functions -_default_logger = Logger('Generic') +_default_logger = Logger("Generic") _default_logger.suppress_dispatcher = True trace = _default_logger.trace debug = _default_logger.debug @@ -77,7 +77,7 @@ # install a default global handler -if os.environ.get('LOGBOOK_INSTALL_DEFAULT_HANDLER'): +if os.environ.get("LOGBOOK_INSTALL_DEFAULT_HANDLER"): default_handler = StderrHandler() default_handler.push_application() diff --git a/logbook/_fallback.py b/logbook/_fallback.py index 71bb901..86275a2 100644 --- a/logbook/_fallback.py +++ b/logbook/_fallback.py @@ -32,8 +32,9 @@ def group_reflected_property(name, default, fallback=_missing): value of the group if set. If there is no such group, the provided default is used. 
""" + def _get(self): - rv = getattr(self, '_' + name, _missing) + rv = getattr(self, "_" + name, _missing) if rv is not _missing and rv != fallback: return rv if self.group is None: @@ -41,15 +42,15 @@ def _get(self): return getattr(self.group, name) def _set(self, value): - setattr(self, '_' + name, value) + setattr(self, "_" + name, value) def _del(self): - delattr(self, '_' + name) + delattr(self, "_" + name) + return property(_get, _set, _del) class _StackBound: - def __init__(self, obj, push, pop): self.__obj = obj self.__push = push @@ -150,7 +151,7 @@ def __init__(self): self._thread_context = thread_local() self._greenlet_context_lock = GreenletRLock() self._greenlet_context = greenlet_local() - self._context_stack = ContextVar('stack') + self._context_stack = ContextVar("stack") self._cache = {} self._stackop = get_iterator_next_method(count()) @@ -173,10 +174,10 @@ def iter_context_objects(self): if len(self._cache) > _MAX_CONTEXT_OBJECT_CACHE: self._cache.clear() objects = self._global[:] - objects.extend(getattr(self._thread_context, 'stack', ())) + objects.extend(getattr(self._thread_context, "stack", ())) if use_gevent: - objects.extend(getattr(self._greenlet_context, 'stack', ())) + objects.extend(getattr(self._greenlet_context, "stack", ())) if use_context: objects.extend(self._context_stack.get([])) @@ -192,7 +193,7 @@ def push_greenlet(self, obj): # remote chance to conflict with thread ids self._cache.pop(greenlet_get_ident(), None) item = (self._stackop(), obj) - stack = getattr(self._greenlet_context, 'stack', None) + stack = getattr(self._greenlet_context, "stack", None) if stack is None: self._greenlet_context.stack = [item] else: @@ -205,8 +206,8 @@ def pop_greenlet(self): try: # remote chance to conflict with thread ids self._cache.pop(greenlet_get_ident(), None) - stack = getattr(self._greenlet_context, 'stack', None) - assert stack, 'no objects on stack' + stack = getattr(self._greenlet_context, "stack", None) + assert stack, "no objects 
on stack" return stack.pop()[1] finally: self._greenlet_context_lock.release() @@ -224,7 +225,7 @@ def push_context(self, obj): def pop_context(self): self._cache.pop(context_get_ident(), None) stack = self._context_stack.get(None) - assert stack, 'no objects on stack' + assert stack, "no objects on stack" return stack.pop()[1] def push_thread(self, obj): @@ -232,7 +233,7 @@ def push_thread(self, obj): try: self._cache.pop(thread_get_ident(), None) item = (self._stackop(), obj) - stack = getattr(self._thread_context, 'stack', None) + stack = getattr(self._thread_context, "stack", None) if stack is None: self._thread_context.stack = [item] else: @@ -244,8 +245,8 @@ def pop_thread(self): self._thread_context_lock.acquire() try: self._cache.pop(thread_get_ident(), None) - stack = getattr(self._thread_context, 'stack', None) - assert stack, 'no objects on stack' + stack = getattr(self._thread_context, "stack", None) + assert stack, "no objects on stack" return stack.pop()[1] finally: self._thread_context_lock.release() @@ -255,7 +256,7 @@ def push_application(self, obj): self._cache.clear() def pop_application(self): - assert self._global, 'no objects on application stack' + assert self._global, "no objects on application stack" popped = self._global.pop()[1] self._cache.clear() return popped diff --git a/logbook/_termcolors.py b/logbook/_termcolors.py index dc08cfa..4a35383 100644 --- a/logbook/_termcolors.py +++ b/logbook/_termcolors.py @@ -12,10 +12,26 @@ codes = {"": "", "reset": esc + "39;49;00m"} -dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue", - "purple", "teal", "lightgray"] -light_colors = ["darkgray", "red", "green", "yellow", "blue", - "fuchsia", "turquoise", "white"] +dark_colors = [ + "black", + "darkred", + "darkgreen", + "brown", + "darkblue", + "purple", + "teal", + "lightgray", +] +light_colors = [ + "darkgray", + "red", + "green", + "yellow", + "blue", + "fuchsia", + "turquoise", + "white", +] x = 30 for d, l in zip(dark_colors, 
light_colors): @@ -34,10 +50,11 @@ def _str_to_type(obj, strtype): """Helper for ansiformat and colorize""" if isinstance(obj, type(strtype)): return obj - return obj.encode('ascii') + return obj.encode("ascii") def colorize(color_key, text): """Returns an ANSI formatted text with the given color.""" - return (_str_to_type(codes[color_key], text) + text + - _str_to_type(codes["reset"], text)) + return ( + _str_to_type(codes[color_key], text) + text + _str_to_type(codes["reset"], text) + ) diff --git a/logbook/base.py b/logbook/base.py index 4772717..813b961 100644 --- a/logbook/base.py +++ b/logbook/base.py @@ -30,7 +30,7 @@ _has_speedups = False try: - if os.environ.get('DISABLE_LOGBOOK_CEXT_AT_RUNTIME'): + if os.environ.get("DISABLE_LOGBOOK_CEXT_AT_RUNTIME"): raise ImportError("Speedups disabled via DISABLE_LOGBOOK_CEXT_AT_RUNTIME") from logbook._speedups import ( @@ -110,13 +110,18 @@ def utc_tz(): elif callable(datetime_format): inst = datetime_format() if not isinstance(inst, datetime): - raise ValueError("Invalid callable value, valid callable " - "should return datetime.datetime instances, " - "not %r" % (type(inst),)) + raise ValueError( + "Invalid callable value, valid callable " + "should return datetime.datetime instances, " + "not %r" % (type(inst),) + ) _datetime_factory = datetime_format else: - raise ValueError("Invalid value %r. Valid values are 'utc' and " - "'local'." % (datetime_format,)) + raise ValueError( + "Invalid value %r. Valid values are 'utc' and " + "'local'." 
% (datetime_format,) + ) + # make sure to sync these up with _speedups.pyx CRITICAL = 15 @@ -129,14 +134,14 @@ def utc_tz(): NOTSET = 0 _level_names = { - CRITICAL: 'CRITICAL', - ERROR: 'ERROR', - WARNING: 'WARNING', - NOTICE: 'NOTICE', - INFO: 'INFO', - DEBUG: 'DEBUG', - TRACE: 'TRACE', - NOTSET: 'NOTSET' + CRITICAL: "CRITICAL", + ERROR: "ERROR", + WARNING: "WARNING", + NOTICE: "NOTICE", + INFO: "INFO", + DEBUG: "DEBUG", + TRACE: "TRACE", + NOTSET: "NOTSET", } _reverse_level_names = {v: k for (k, v) in iteritems(_level_names)} _missing = object() @@ -145,12 +150,14 @@ def utc_tz(): # on python 3 we can savely assume that frame filenames will be in # unicode, on Python 2 we have to apply a trick. if PY2: + def _convert_frame_filename(fn): if isinstance(fn, unicode): - fn = fn.decode(sys.getfilesystemencoding() or 'utf-8', - 'replace') + fn = fn.decode(sys.getfilesystemencoding() or "utf-8", "replace") return fn + else: + def _convert_frame_filename(fn): return fn @@ -165,8 +172,8 @@ def _get_level_name(self): def _set_level_name(self, level): self.level = lookup_level(level) - return property(_get_level_name, _set_level_name, - doc='The level as unicode string') + + return property(_get_level_name, _set_level_name, doc="The level as unicode string") def lookup_level(level): @@ -176,7 +183,7 @@ def lookup_level(level): try: return _reverse_level_names[level] except KeyError: - raise LookupError('unknown level name %s' % level) + raise LookupError("unknown level name %s" % level) def get_level_name(level): @@ -184,7 +191,7 @@ def get_level_name(level): try: return _level_names[level] except KeyError: - raise LookupError('unknown level') + raise LookupError("unknown level") class _ExceptionCatcher: @@ -201,7 +208,7 @@ def __enter__(self): def __exit__(self, exc_type, exc_value, tb): if exc_type is not None: kwargs = self.kwargs.copy() - kwargs['exc_info'] = (exc_type, exc_value, tb) + kwargs["exc_info"] = (exc_type, exc_value, tb) self.logger.exception(*self.args, 
**kwargs) return True @@ -222,7 +229,7 @@ def push_greenlet(self): def pop_greenlet(self): """Pops the context object from the stack.""" popped = self.stack_manager.pop_greenlet() - assert popped is self, 'popped unexpected object' + assert popped is self, "popped unexpected object" def push_context(self): """Pushes the context object to the context stack.""" @@ -231,7 +238,7 @@ def push_context(self): def pop_context(self): """Pops the context object from the stack.""" popped = self.stack_manager.pop_context() - assert popped is self, 'popped unexpected object' + assert popped is self, "popped unexpected object" def push_thread(self): """Pushes the context object to the thread stack.""" @@ -240,7 +247,7 @@ def push_thread(self): def pop_thread(self): """Pops the context object from the stack.""" popped = self.stack_manager.pop_thread() - assert popped is self, 'popped unexpected object' + assert popped is self, "popped unexpected object" def push_application(self): """Pushes the context object to the application stack.""" @@ -249,7 +256,7 @@ def push_application(self): def pop_application(self): """Pops the context object from the stack.""" popped = self.stack_manager.pop_application() - assert popped is self, 'popped unexpected object' + assert popped is self, "popped unexpected object" class NestedSetup(StackedObject): @@ -322,10 +329,12 @@ class _InheritedType: __slots__ = () def __repr__(self): - return 'Inherit' + return "Inherit" def __reduce__(self): - return 'Inherit' + return "Inherit" + + Inherit = _InheritedType() @@ -358,6 +367,7 @@ class Flags(ContextObject): with Flags(errors='silent'): ... """ + stack_manager = ContextStackManager() def __init__(self, **flags): @@ -387,12 +397,24 @@ class LogRecord: contain all the information pertinent to the event being logged. 
The main information passed in is in msg and args """ - _pullable_information = frozenset(( - 'func_name', 'module', 'filename', 'lineno', 'process_name', 'thread', - 'thread_name', 'greenlet', 'formatted_exception', 'message', - 'exception_name', 'exception_message' - )) - _noned_on_close = frozenset(('exc_info', 'frame', 'calling_frame')) + + _pullable_information = frozenset( + ( + "func_name", + "module", + "filename", + "lineno", + "process_name", + "thread", + "thread_name", + "greenlet", + "formatted_exception", + "message", + "exception_name", + "exception_message", + ) + ) + _noned_on_close = frozenset(("exc_info", "frame", "calling_frame")) #: can be overriden by a handler to not close the record. This could #: lead to memory leaks so it should be used carefully. @@ -414,9 +436,19 @@ class LogRecord: #: information that becomes unavailable on close. information_pulled = False - def __init__(self, channel, level, msg, args=None, kwargs=None, - exc_info=None, extra=None, frame=None, dispatcher=None, - frame_correction=0): + def __init__( + self, + channel, + level, + msg, + args=None, + kwargs=None, + exc_info=None, + extra=None, + frame=None, + dispatcher=None, + frame_correction=0, + ): #: the name of the logger that created it or any other textual #: channel description. This is a descriptive name and can be #: used for filtering. @@ -444,7 +476,7 @@ def __init__(self, channel, level, msg, args=None, kwargs=None, #: data. # TODO: Replace the lambda with str when we remove support for python 2 - self.extra = defaultdict(lambda: '', extra or ()) + self.extra = defaultdict(lambda: "", extra or ()) #: If available, optionally the interpreter frame that pulled the #: heavy init. This usually points to somewhere in the dispatcher. 
#: Might not be available for all calls and is removed when the log @@ -472,11 +504,11 @@ def heavy_init(self): """ if self.heavy_initialized: return - assert not self.late, 'heavy init is no longer possible' + assert not self.late, "heavy init is no longer possible" self.heavy_initialized = True self.process = os.getpid() self.time = _datetime_factory() - if self.frame is None and Flags.get_flag('introspection', True): + if self.frame is None and Flags.get_flag("introspection", True): self.frame = sys._getframe(1) if self.exc_info is True: self.exc_info = sys.exc_info() @@ -517,10 +549,10 @@ def to_dict(self, json_safe=False): self.pull_information() rv = {} for key, value in iteritems(self.__dict__): - if key[:1] != '_' and key not in self._noned_on_close: + if key[:1] != "_" and key not in self._noned_on_close: rv[key] = value # the extra dict is exported as regular dict - rv['extra'] = dict(rv['extra']) + rv["extra"] = dict(rv["extra"]) if json_safe: return to_safe_json(rv) return rv @@ -547,7 +579,7 @@ def update_from_dict(self, d): self.time = parse_iso8601(self.time) # TODO: Replace the lambda with str when we remove support for python 2` - self.extra = defaultdict(lambda: '', self.extra) + self.extra = defaultdict(lambda: "", self.extra) return self def _format_message(self, msg, *args, **kwargs): @@ -563,24 +595,24 @@ def message(self): return self.msg try: try: - return self._format_message(self.msg, *self.args, - **self.kwargs) + return self._format_message(self.msg, *self.args, **self.kwargs) except UnicodeDecodeError: # Assume an unicode message but mixed-up args - msg = self.msg.encode('utf-8', 'replace') + msg = self.msg.encode("utf-8", "replace") return self._format_message(msg, *self.args, **self.kwargs) except (UnicodeEncodeError, AttributeError): # we catch AttributeError since if msg is bytes, # it won't have the 'format' method - if (sys.exc_info()[0] is AttributeError - and (PY2 or not isinstance(self.msg, bytes))): + if sys.exc_info()[0] is 
AttributeError and ( + PY2 or not isinstance(self.msg, bytes) + ): # this is not the case we thought it is... raise # Assume encoded message with unicode args. # The assumption of utf8 as input encoding is just a guess, # but this codepath is unlikely (if the message is a constant # string in the caller's source file) - msg = self.msg.decode('utf-8', 'replace') + msg = self.msg.decode("utf-8", "replace") return self._format_message(msg, *self.args, **self.kwargs) except Exception: @@ -589,16 +621,21 @@ def message(self): # access to the frame. But there is not much we can do about # that. e = sys.exc_info()[1] - errormsg = ('Could not format message with provided ' - 'arguments: {err}\n msg={msg!r}\n ' - 'args={args!r} \n kwargs={kwargs!r}.\n' - 'Happened in file {file}, line {lineno}').format( - err=e, msg=self.msg, args=self.args, - kwargs=self.kwargs, file=self.filename, - lineno=self.lineno + errormsg = ( + "Could not format message with provided " + "arguments: {err}\n msg={msg!r}\n " + "args={args!r} \n kwargs={kwargs!r}.\n" + "Happened in file {file}, line {lineno}" + ).format( + err=e, + msg=self.msg, + args=self.args, + kwargs=self.kwargs, + file=self.filename, + lineno=self.lineno, ) if PY2: - errormsg = errormsg.encode('utf-8') + errormsg = errormsg.encode("utf-8") raise TypeError(errormsg) level_name = level_name_property() @@ -639,7 +676,7 @@ def module(self): """ cf = self.calling_frame if cf is not None: - return cf.f_globals.get('__name__') + return cf.f_globals.get("__name__") @cached_property def filename(self): @@ -649,7 +686,7 @@ def filename(self): cf = self.calling_frame if cf is not None: fn = cf.f_code.co_filename - if fn[:1] == '<' and fn[-1:] == '>': + if fn[:1] == "<" and fn[-1:] == ">": return fn return _convert_frame_filename(os.path.abspath(fn)) @@ -693,7 +730,7 @@ def process_name(self): # yet - e.g. if a custom import hook causes third-party code # to run when multiprocessing calls import. 
See issue 8200 # for an example - mp = sys.modules.get('multiprocessing') + mp = sys.modules.get("multiprocessing") if mp is not None: # pragma: no cover try: return mp.current_process().name @@ -706,9 +743,9 @@ def formatted_exception(self): in case there was any. """ if self.exc_info is not None and self.exc_info != (None, None, None): - rv = ''.join(traceback.format_exception(*self.exc_info)) + rv = "".join(traceback.format_exception(*self.exc_info)) if PY2: - rv = rv.decode('utf-8', 'replace') + rv = rv.decode("utf-8", "replace") return rv.rstrip() @cached_property @@ -716,12 +753,12 @@ def exception_name(self): """The name of the exception.""" if self.exc_info is not None: cls = self.exc_info[0] - return u(cls.__module__ + '.' + cls.__name__) + return u(cls.__module__ + "." + cls.__name__) @property def exception_shortname(self): """An abbreviated exception name (no import path)""" - return self.exception_name.rsplit('.')[-1] + return self.exception_name.rsplit(".")[-1] @cached_property def exception_message(self): @@ -734,7 +771,7 @@ def exception_message(self): else: return str(val) except UnicodeError: - return str(val).decode('utf-8', 'replace') + return str(val).decode("utf-8", "replace") @property def dispatcher(self): @@ -814,11 +851,11 @@ def exception(self, *args, **kwargs): if self.disabled or ERROR < self.level: return if not args: - args = ('Uncaught exception occurred',) - if 'exc_info' not in kwargs: + args = ("Uncaught exception occurred",) + if "exc_info" not in kwargs: exc_info = sys.exc_info() - assert exc_info[0] is not None, 'no exception occurred' - kwargs.setdefault('exc_info', sys.exc_info()) + assert exc_info[0] is not None, "no exception occurred" + kwargs.setdefault("exc_info", sys.exc_info()) return self.error(*args, **kwargs) def critical(self, *args, **kwargs): @@ -849,7 +886,7 @@ def catch_exceptions(self, *args, **kwargs): execute_code_that_might_fail() """ if not args: - args = ('Uncaught exception occurred',) + args = 
("Uncaught exception occurred",) return _ExceptionCatcher(self, args, kwargs) def enable(self): @@ -863,7 +900,7 @@ def enable(self): try: self.disabled = False except AttributeError: - raise AttributeError('The disabled property is read-only.') + raise AttributeError("The disabled property is read-only.") def disable(self): """Convenience method to disable this logger. @@ -876,14 +913,15 @@ def disable(self): try: self.disabled = True except AttributeError: - raise AttributeError('The disabled property is read-only.') + raise AttributeError("The disabled property is read-only.") def _log(self, level, args, kwargs): - exc_info = kwargs.pop('exc_info', None) - extra = kwargs.pop('extra', None) - frame_correction = kwargs.pop('frame_correction', 0) - self.make_record_and_handle(level, args[0], args[1:], kwargs, - exc_info, extra, frame_correction) + exc_info = kwargs.pop("exc_info", None) + extra = kwargs.pop("extra", None) + frame_correction = kwargs.pop("frame_correction", 0) + self.make_record_and_handle( + level, args[0], args[1:], kwargs, exc_info, extra, frame_correction + ) class RecordDispatcher: @@ -905,8 +943,8 @@ def __init__(self, name=None, level=NOTSET): #: the level of the record dispatcher as integer self.level = level - disabled = group_reflected_property('disabled', False) - level = group_reflected_property('level', NOTSET, fallback=NOTSET) + disabled = group_reflected_property("disabled", False) + level = group_reflected_property("level", NOTSET, fallback=NOTSET) def handle(self, record): """Call the handlers for the specified record. 
This is @@ -919,8 +957,9 @@ def handle(self, record): if not self.disabled and record.level >= self.level: self.call_handlers(record) - def make_record_and_handle(self, level, msg, args, kwargs, exc_info, - extra, frame_correction): + def make_record_and_handle( + self, level, msg, args, kwargs, exc_info, extra, frame_correction + ): """Creates a record from some given arguments and heads it over to the handling system. """ @@ -933,8 +972,18 @@ def make_record_and_handle(self, level, msg, args, kwargs, exc_info, if not self.suppress_dispatcher: channel = self - record = LogRecord(self.name, level, msg, args, kwargs, exc_info, - extra, None, channel, frame_correction) + record = LogRecord( + self.name, + level, + msg, + args, + kwargs, + exc_info, + extra, + None, + channel, + frame_correction, + ) # after handling the log record is closed which will remove some # referenes that would require a GC run on cpython. This includes @@ -969,8 +1018,9 @@ def call_handlers(self, record): # Both logger attached handlers as well as context specific # handlers are handled one after another. The latter also # include global handlers. - for handler in chain(self.handlers, - Handler.stack_manager.iter_context_objects()): + for handler in chain( + self.handlers, Handler.stack_manager.iter_context_objects() + ): # skip records that this handler is not interested in based # on the record and handler level or in case this method was # overridden on some custom logic. @@ -997,8 +1047,7 @@ def call_handlers(self, record): # record. The impact is that filters are slower than the # handler's should_handle function in case there is no default # handler that would handle the record (delayed init). 
- if (handler.filter is not None - and not handler.filter(record, handler)): + if handler.filter is not None and not handler.filter(record, handler): continue # We might have a filter, so now that we know we *should* handle @@ -1069,7 +1118,7 @@ def __init__(self, loggers=None, level=NOTSET, processor=None): def add_logger(self, logger): """Adds a logger to this group.""" - assert logger.group is None, 'Logger already belongs to a group' + assert logger.group is None, "Logger already belongs to a group" logger.group = self self.loggers.append(logger) @@ -1100,7 +1149,7 @@ def enable(self, force=False): self.disabled = False if force: for logger in self.loggers: - rv = getattr(logger, '_disabled', _missing) + rv = getattr(logger, "_disabled", _missing) if rv is not _missing: logger.enable() @@ -1118,7 +1167,7 @@ def disable(self, force=False): self.disabled = True if force: for logger in self.loggers: - rv = getattr(logger, '_disabled', _missing) + rv = getattr(logger, "_disabled", _missing) if rv is not _missing: logger.disable() @@ -1133,5 +1182,6 @@ def dispatch_record(record): """ _default_dispatcher.call_handlers(record) + # at that point we are safe to import handler -from logbook.handlers import Handler # isort:skip +from logbook.handlers import Handler # isort:skip diff --git a/logbook/compat.py b/logbook/compat.py index 874e57d..5c08c1a 100644 --- a/logbook/compat.py +++ b/logbook/compat.py @@ -44,6 +44,7 @@ class redirected_logging: with redirected_logging(): ... """ + def __init__(self, set_root_logger_level=True): self.old_handlers = logging.root.handlers[:] self.old_level = logging.root.level @@ -61,7 +62,6 @@ def end(self, etype=None, evalue=None, tb=None): class LoggingCompatRecord(logbook.LogRecord): - def _format_message(self, msg, *args, **kwargs): if kwargs: assert not args @@ -101,11 +101,28 @@ def find_extra(self, old_record): extra dictionaries. 
""" rv = vars(old_record).copy() - for key in ('name', 'msg', 'args', 'levelname', 'levelno', - 'pathname', 'filename', 'module', 'exc_info', - 'exc_text', 'lineno', 'funcName', 'created', - 'msecs', 'relativeCreated', 'thread', 'threadName', - 'greenlet', 'processName', 'process'): + for key in ( + "name", + "msg", + "args", + "levelname", + "levelno", + "pathname", + "filename", + "module", + "exc_info", + "exc_text", + "lineno", + "funcName", + "created", + "msecs", + "relativeCreated", + "thread", + "threadName", + "greenlet", + "processName", + "process", + ): rv.pop(key, None) return rv @@ -113,9 +130,11 @@ def find_caller(self, old_record): """Tries to find the caller that issued the call.""" frm = sys._getframe(2) while frm is not None: - if (frm.f_globals is globals() or - frm.f_globals is logbook.base.__dict__ or - frm.f_globals is logging.__dict__): + if ( + frm.f_globals is globals() + or frm.f_globals is logbook.base.__dict__ + or frm.f_globals is logging.__dict__ + ): frm = frm.f_back else: return frm @@ -135,12 +154,16 @@ def convert_record(self, old_record): if isinstance(args, collections_abc.Mapping): kwargs = args args = None - record = LoggingCompatRecord(old_record.name, - self.convert_level(old_record.levelno), - old_record.msg, args, - kwargs, old_record.exc_info, - self.find_extra(old_record), - self.find_caller(old_record)) + record = LoggingCompatRecord( + old_record.name, + self.convert_level(old_record.levelno), + old_record.msg, + args, + kwargs, + old_record.exc_info, + self.find_extra(old_record), + self.find_caller(old_record), + ) record.time = self.convert_time(old_record.created) return record @@ -163,8 +186,7 @@ class LoggingHandler(logbook.Handler): warn('This goes to logging') """ - def __init__(self, logger=None, level=logbook.NOTSET, filter=None, - bubble=False): + def __init__(self, logger=None, level=logbook.NOTSET, filter=None, bubble=False): logbook.Handler.__init__(self, level, filter, bubble) if logger is None: logger = 
logging.getLogger() @@ -203,16 +225,19 @@ def convert_record(self, old_record): """Converts a record from logbook to logging.""" if sys.version_info >= (2, 5): # make sure 2to3 does not screw this up - optional_kwargs = {'func': getattr(old_record, 'func_name')} + optional_kwargs = {"func": getattr(old_record, "func_name")} else: optional_kwargs = {} - record = logging.LogRecord(old_record.channel, - self.convert_level(old_record.level), - old_record.filename, - old_record.lineno, - old_record.message, - (), old_record.exc_info, - **optional_kwargs) + record = logging.LogRecord( + old_record.channel, + self.convert_level(old_record.level), + old_record.filename, + old_record.lineno, + old_record.message, + (), + old_record.exc_info, + **optional_kwargs, + ) for key, value in iteritems(old_record.extra): record.__dict__.setdefault(key, value) record.created = self.convert_time(old_record.time) @@ -259,12 +284,12 @@ def message_to_unicode(self, message): try: return u(str(message)) except UnicodeError: - return str(message).decode('utf-8', 'replace') + return str(message).decode("utf-8", "replace") def make_record(self, message, exception, filename, lineno): category = exception.__name__ - if exception.__module__ not in ('exceptions', 'builtins'): - category = exception.__module__ + '.' + category + if exception.__module__ not in ("exceptions", "builtins"): + category = exception.__module__ + "." + category rv = logbook.LogRecord(category, logbook.WARNING, message) # we don't know the caller, but we get that information from the # warning system. Just attach them. 
@@ -280,11 +305,11 @@ def start(self): warnings.filters = self._filters[:] self._showwarning = warnings.showwarning - def showwarning(message, category, filename, lineno, - file=None, line=None): + def showwarning(message, category, filename, lineno, file=None, line=None): message = self.message_to_unicode(message) record = self.make_record(message, category, filename, lineno) logbook.dispatch_record(record) + warnings.showwarning = showwarning def end(self, etype=None, evalue=None, tb=None): diff --git a/logbook/concurrency.py b/logbook/concurrency.py index cd35cc2..7159327 100644 --- a/logbook/concurrency.py +++ b/logbook/concurrency.py @@ -14,6 +14,7 @@ def _disable_gevent(): # for testing def is_gevent_enabled(): global use_gevent return use_gevent + except ImportError: has_gevent = False @@ -29,14 +30,15 @@ def is_gevent_enabled(): if has_gevent: from gevent.monkey import get_original as _get_original - ThreadLock = _get_original('threading', 'Lock') - ThreadRLock = _get_original('threading', 'RLock') + + ThreadLock = _get_original("threading", "Lock") + ThreadRLock = _get_original("threading", "RLock") try: - thread_get_ident = _get_original('threading', 'get_ident') + thread_get_ident = _get_original("threading", "get_ident") except AttributeError: # In 2.7, this is called _get_ident - thread_get_ident = _get_original('threading', '_get_ident') - thread_local = _get_original('threading', 'local') + thread_get_ident = _get_original("threading", "_get_ident") + thread_local = _get_original("threading", "local") from gevent.local import local as greenlet_local from gevent.lock import BoundedSemaphore @@ -55,8 +57,11 @@ def __init__(self): def __repr__(self): owner = self._owner - return "<%s owner=%r count=%d>" % (self.__class__.__name__, owner, - self._count) + return "<%s owner=%r count=%d>" % ( + self.__class__.__name__, + owner, + self._count, + ) def acquire(self, blocking=1): tid = thread_get_ident() @@ -119,7 +124,7 @@ def __exit__(self, t, v, tb): 
self.release() def _get_greenlet_lock(self): - if not hasattr(self._thread_local, 'greenlet_lock'): + if not hasattr(self._thread_local, "greenlet_lock"): greenlet_lock = self._thread_local.greenlet_lock = BoundedSemaphore(1) else: greenlet_lock = self._thread_local.greenlet_lock @@ -127,10 +132,12 @@ def _get_greenlet_lock(self): def _is_owned(self): return self._owner == (thread_get_ident(), greenlet_get_ident()) + else: from threading import Lock as ThreadLock from threading import RLock as ThreadRLock from threading import currentThread + try: from thread import _local as thread_local from thread import get_ident as thread_get_ident @@ -178,13 +185,13 @@ def new_fine_grained_lock(): from itertools import count context_ident_counter = count() - context_ident = ContextVar('context_ident') + context_ident = ContextVar("context_ident") def context_get_ident(): try: return context_ident.get() except LookupError: - ident = 'context-%s' % next(context_ident_counter) + ident = "context-%s" % next(context_ident_counter) context_ident.set(ident) return ident @@ -196,6 +203,7 @@ def is_context_enabled(): return False else: + class ContextVar: def __init__(self, name): self.name = name diff --git a/logbook/handlers.py b/logbook/handlers.py index 17fa842..c276ba2 100644 --- a/logbook/handlers.py +++ b/logbook/handlers.py @@ -63,11 +63,14 @@ ) DEFAULT_FORMAT_STRING = u( - '[{record.time:%Y-%m-%d %H:%M:%S.%f%z}] ' - '{record.level_name}: {record.channel}: {record.message}') + "[{record.time:%Y-%m-%d %H:%M:%S.%f%z}] " + "{record.level_name}: {record.channel}: {record.message}" +) -SYSLOG_FORMAT_STRING = u('{record.channel}: {record.message}') -NTLOG_FORMAT_STRING = dedent(u(''' +SYSLOG_FORMAT_STRING = u("{record.channel}: {record.message}") +NTLOG_FORMAT_STRING = dedent( + u( + """ Message Level: {record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} @@ -77,10 +80,14 @@ Event provided Message: {record.message} - ''')).lstrip() + """ + ) 
+).lstrip() -TEST_FORMAT_STRING = u('[{record.level_name}] {record.channel}: {record.message}') -MAIL_FORMAT_STRING = dedent(u(''' +TEST_FORMAT_STRING = u("[{record.level_name}] {record.channel}: {record.message}") +MAIL_FORMAT_STRING = dedent( + u( + """ Subject: {handler.subject} Message type: {record.level_name} @@ -92,15 +99,21 @@ Message: {record.message} - ''')).lstrip() + """ + ) +).lstrip() -MAIL_RELATED_FORMAT_STRING = dedent(u(''' +MAIL_RELATED_FORMAT_STRING = dedent( + u( + """ Message type: {record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} Function: {record.func_name} {record.message} - ''')).lstrip() + """ + ) +).lstrip() SYSLOG_PORT = 514 @@ -112,7 +125,7 @@ def create_syshandler(application_name, level=NOTSET): this creates a :class:`SyslogHandler`, on Windows sytems it will create a :class:`NTEventLogHandler`. """ - if os.name == 'nt': + if os.name == "nt": return NTEventLogHandler(application_name, level=level) return SyslogHandler(application_name, level=level) @@ -127,8 +140,13 @@ class _HandlerType(type): def __new__(cls, name, bases, d): # aha, that thing has a custom close method. We will need a magic # __del__ for it to be called on cleanup. - if (bases != (ContextObject,) and 'close' in d and '__del__' not in d - and not any(hasattr(x, '__del__') for x in bases)): + if ( + bases != (ContextObject,) + and "close" in d + and "__del__" not in d + and not any(hasattr(x, "__del__") for x in bases) + ): + def _magic_del(self): try: self.close() @@ -136,7 +154,8 @@ def _magic_del(self): # del is also invoked when init fails, so we better just # ignore any exception that might be raised here pass - d['__del__'] = _magic_del + + d["__del__"] = _magic_del return type.__new__(cls, name, bases, d) @@ -183,6 +202,7 @@ class Handler(with_metaclass(_HandlerType), ContextObject): If gevent is enabled, the handler is aliased to `greenletbound`. 
""" + stack_manager = ContextStackManager() #: a flag for this handler that can be set to `True` for handlers that @@ -316,13 +336,16 @@ def handle_error(self, record, exc_info): Check :class:`Flags` for more information. """ try: - behaviour = Flags.get_flag('errors', 'print') - if behaviour == 'raise': + behaviour = Flags.get_flag("errors", "print") + if behaviour == "raise": reraise(exc_info[0], exc_info[1], exc_info[2]) - elif behaviour == 'print': + elif behaviour == "print": traceback.print_exception(*(exc_info + (None, sys.stderr))) - sys.stderr.write('Logged from file {}, line {}\n'.format( - record.filename, record.lineno)) + sys.stderr.write( + "Logged from file {}, line {}\n".format( + record.filename, record.lineno + ) + ) except OSError: pass @@ -338,11 +361,11 @@ class NullHandler(Handler): NullHandlers swallow all logs sent to them, and do not bubble them onwards. """ + blackhole = True def __init__(self, level=NOTSET, filter=None): - super().__init__(level=level, filter=filter, - bubble=False) + super().__init__(level=level, filter=filter, bubble=False) class WrapperHandler(Handler): @@ -357,7 +380,7 @@ class WrapperHandler(Handler): #: a set of direct attributes that are not forwarded to the inner #: handler. This has to be extended as necessary. 
- _direct_attrs = frozenset(['handler']) + _direct_attrs = frozenset(["handler"]) def __init__(self, handler): self.handler = handler @@ -398,12 +421,12 @@ def format_record(self, record, handler): except UnicodeEncodeError: # self._formatter is a str, but some of the record items # are unicode - fmt = self._formatter.decode('ascii', 'replace') + fmt = self._formatter.decode("ascii", "replace") return fmt.format(record=record, handler=handler) except UnicodeDecodeError: # self._formatter is unicode, but some of the record items # are non-ascii str - fmt = self._formatter.encode('ascii', 'replace') + fmt = self._formatter.encode("ascii", "replace") return fmt.format(record=record, handler=handler) def format_exception(self, record): @@ -413,7 +436,7 @@ def __call__(self, record, handler): line = self.format_record(record, handler) exc = self.format_exception(record) if exc: - line += u('\n') + exc + line += u("\n") + exc return line @@ -458,9 +481,9 @@ class HashingHandlerMixin: def hash_record_raw(self, record): """Returns a hashlib object with the hash of the record.""" hash = sha1() - hash.update(('%d\x00' % record.level).encode('ascii')) - hash.update((record.channel or u('')).encode('utf-8') + b('\x00')) - hash.update(record.filename.encode('utf-8') + b('\x00')) + hash.update(("%d\x00" % record.level).encode("ascii")) + hash.update((record.channel or u("")).encode("utf-8") + b("\x00")) + hash.update(record.filename.encode("utf-8") + b("\x00")) hash.update(b(str(record.lineno))) return hash @@ -473,6 +496,7 @@ def hash_record(self, record): """ return self.hash_record_raw(record).hexdigest() + _NUMBER_TYPES = integer_types + (float,) @@ -522,11 +546,12 @@ def check_delivery(self, record): first_count = last_count old_count = suppression_count - if (not suppression_count and - len(self._record_limits) >= self.max_record_cache): + if ( + not suppression_count + and len(self._record_limits) >= self.max_record_cache + ): cache_items = 
sorted(self._record_limits.items()) - del cache_items[:int(self._record_limits) - * self.record_cache_prune] + del cache_items[: int(self._record_limits) * self.record_cache_prune] self._record_limits = dict(cache_items) self._record_limits[hash] = (first_count, old_count + 1) @@ -555,8 +580,15 @@ class StreamHandler(Handler, StringFormatterHandlerMixin): passed that was opened in binary mode. """ - def __init__(self, stream, level=NOTSET, format_string=None, - encoding=None, filter=None, bubble=False): + def __init__( + self, + stream, + level=NOTSET, + format_string=None, + encoding=None, + filter=None, + bubble=False, + ): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) self.encoding = encoding @@ -585,19 +617,20 @@ def close(self): def flush(self): """Flushes the inner stream.""" - if self.stream is not None and hasattr(self.stream, 'flush'): + if self.stream is not None and hasattr(self.stream, "flush"): self.stream.flush() def encode(self, msg): """Encodes the message to the stream encoding.""" stream = self.stream - rv = msg + '\n' - if ((PY2 and is_unicode(rv)) or - not (PY2 or is_unicode(rv) or _is_text_stream(stream))): + rv = msg + "\n" + if (PY2 and is_unicode(rv)) or not ( + PY2 or is_unicode(rv) or _is_text_stream(stream) + ): enc = self.encoding if enc is None: - enc = getattr(stream, 'encoding', None) or 'utf-8' - rv = rv.encode(enc, 'replace') + enc = getattr(stream, "encoding", None) or "utf-8" + rv = rv.encode(enc, "replace") return rv def write(self, item): @@ -628,12 +661,22 @@ class FileHandler(StreamHandler): :class:`~logbook.FingersCrossedHandler` or something similar. 
""" - def __init__(self, filename, mode='a', encoding=None, level=NOTSET, - format_string=None, delay=False, filter=None, bubble=False): + def __init__( + self, + filename, + mode="a", + encoding=None, + level=NOTSET, + format_string=None, + delay=False, + filter=None, + bubble=False, + ): if encoding is None: - encoding = 'utf-8' - StreamHandler.__init__(self, None, level, format_string, - encoding, filter, bubble) + encoding = "utf-8" + StreamHandler.__init__( + self, None, level, format_string, encoding, filter, bubble + ) self._filename = filename self._mode = mode if delay: @@ -675,17 +718,35 @@ def ensure_stream_is_open(self): class GZIPCompressionHandler(FileHandler): - def __init__(self, filename, encoding=None, level=NOTSET, - format_string=None, delay=False, filter=None, bubble=False, compression_quality=9): - + def __init__( + self, + filename, + encoding=None, + level=NOTSET, + format_string=None, + delay=False, + filter=None, + bubble=False, + compression_quality=9, + ): self._compression_quality = compression_quality - super().__init__(filename, mode='wb', encoding=encoding, level=level, - format_string=format_string, delay=delay, filter=filter, bubble=bubble) + super().__init__( + filename, + mode="wb", + encoding=encoding, + level=level, + format_string=format_string, + delay=delay, + filter=filter, + bubble=bubble, + ) def _open(self, mode=None): if mode is None: mode = self._mode - self.stream = gzip.open(self._filename, mode, compresslevel=self._compression_quality) + self.stream = gzip.open( + self._filename, mode, compresslevel=self._compression_quality + ) def write(self, item): if isinstance(item, str): @@ -700,19 +761,39 @@ def should_flush(self): class BrotliCompressionHandler(FileHandler): - def __init__(self, filename, encoding=None, level=NOTSET, - format_string=None, delay=False, filter=None, bubble=False, - compression_window_size=4*1024**2, compression_quality=11): - super().__init__(filename, mode='wb', encoding=encoding, 
level=level, - format_string=format_string, delay=delay, filter=filter, bubble=bubble) + def __init__( + self, + filename, + encoding=None, + level=NOTSET, + format_string=None, + delay=False, + filter=None, + bubble=False, + compression_window_size=4 * 1024**2, + compression_quality=11, + ): + super().__init__( + filename, + mode="wb", + encoding=encoding, + level=level, + format_string=format_string, + delay=delay, + filter=filter, + bubble=bubble, + ) try: from brotli import Compressor except ImportError: - raise RuntimeError('The brotli library is required for ' - 'the BrotliCompressionHandler.') + raise RuntimeError( + "The brotli library is required for " "the BrotliCompressionHandler." + ) max_window_size = int(math.log(compression_window_size, 2)) - self._compressor = Compressor(quality=compression_quality, lgwin=max_window_size) + self._compressor = Compressor( + quality=compression_quality, lgwin=max_window_size + ) def _open(self, mode=None): if mode is None: @@ -756,13 +837,22 @@ class MonitoringFileHandler(FileHandler): work on a windows system. """ - def __init__(self, filename, mode='a', encoding='utf-8', level=NOTSET, - format_string=None, delay=False, filter=None, bubble=False): - FileHandler.__init__(self, filename, mode, encoding, level, - format_string, delay, filter, bubble) - if os.name == 'nt': - raise RuntimeError('MonitoringFileHandler ' - 'does not support Windows') + def __init__( + self, + filename, + mode="a", + encoding="utf-8", + level=NOTSET, + format_string=None, + delay=False, + filter=None, + bubble=False, + ): + FileHandler.__init__( + self, filename, mode, encoding, level, format_string, delay, filter, bubble + ) + if os.name == "nt": + raise RuntimeError("MonitoringFileHandler " "does not support Windows") self._query_fd() def _query_fd(self): @@ -806,10 +896,10 @@ class StderrHandler(StreamHandler): point to the old one. 
""" - def __init__(self, level=NOTSET, format_string=None, filter=None, - bubble=False): - StreamHandler.__init__(self, _missing, level, format_string, - None, filter, bubble) + def __init__(self, level=NOTSET, format_string=None, filter=None, bubble=False): + StreamHandler.__init__( + self, _missing, level, format_string, None, filter, bubble + ) @property def stream(self): @@ -830,15 +920,25 @@ class RotatingFileHandler(FileHandler): asking on rollover. """ - def __init__(self, filename, mode='a', encoding='utf-8', level=NOTSET, - format_string=None, delay=False, max_size=1024 * 1024, - backup_count=5, filter=None, bubble=False): - FileHandler.__init__(self, filename, mode, encoding, level, - format_string, delay, filter, bubble) + def __init__( + self, + filename, + mode="a", + encoding="utf-8", + level=NOTSET, + format_string=None, + delay=False, + max_size=1024 * 1024, + backup_count=5, + filter=None, + bubble=False, + ): + FileHandler.__init__( + self, filename, mode, encoding, level, format_string, delay, filter, bubble + ) self.max_size = max_size self.backup_count = backup_count - assert backup_count > 0, ('at least one backup file has to be ' - 'specified') + assert backup_count > 0, "at least one backup file has to be " "specified" def should_rollover(self, record, bytes): self.stream.seek(0, 2) @@ -847,16 +947,16 @@ def should_rollover(self, record, bytes): def perform_rollover(self): self.stream.close() for x in xrange(self.backup_count - 1, 0, -1): - src = '%s.%d' % (self._filename, x) - dst = '%s.%d' % (self._filename, x + 1) + src = "%s.%d" % (self._filename, x) + dst = "%s.%d" % (self._filename, x + 1) try: rename(src, dst) except OSError: e = sys.exc_info()[1] if e.errno != errno.ENOENT: raise - rename(self._filename, self._filename + '.1') - self._open('w') + rename(self._filename, self._filename + ".1") + self._open("w") def emit(self, record): msg = self.format(record) @@ -911,11 +1011,20 @@ class TimedRotatingFileHandler(FileHandler): until it 
is rolled over """ - def __init__(self, filename, mode='a', encoding='utf-8', level=NOTSET, - format_string=None, date_format='%Y-%m-%d', - backup_count=0, filter=None, bubble=False, - timed_filename_for_current=True, - rollover_format='{basename}-{timestamp}{ext}'): + def __init__( + self, + filename, + mode="a", + encoding="utf-8", + level=NOTSET, + format_string=None, + date_format="%Y-%m-%d", + backup_count=0, + filter=None, + bubble=False, + timed_filename_for_current=True, + rollover_format="{basename}-{timestamp}{ext}", + ): self.date_format = date_format self.backup_count = backup_count @@ -930,13 +1039,12 @@ def __init__(self, filename, mode='a', encoding='utf-8', level=NOTSET, filename = self.generate_timed_filename(self._timestamp) elif os.path.exists(filename): self._timestamp = self._get_timestamp( - datetime.fromtimestamp( - os.stat(filename).st_mtime - ) + datetime.fromtimestamp(os.stat(filename).st_mtime) ) - FileHandler.__init__(self, filename, mode, encoding, level, - format_string, True, filter, bubble) + FileHandler.__init__( + self, filename, mode, encoding, level, format_string, True, filter, bubble + ) def _get_timestamp(self, datetime): """ @@ -950,9 +1058,8 @@ def generate_timed_filename(self, timestamp): to the handler at init time. 
""" timed_filename = self.rollover_format.format( - basename=self.basename, - timestamp=timestamp, - ext=self.ext) + basename=self.basename, timestamp=timestamp, ext=self.ext + ) return timed_filename def files_to_delete(self): @@ -961,18 +1068,20 @@ def files_to_delete(self): """ directory = os.path.dirname(self._filename) files = [] - rollover_regex = re.compile(self.rollover_format.format( - basename=re.escape(self.basename), - timestamp='.+', - ext=re.escape(self.ext), - )) + rollover_regex = re.compile( + self.rollover_format.format( + basename=re.escape(self.basename), + timestamp=".+", + ext=re.escape(self.ext), + ) + ) for filename in os.listdir(directory): filename = os.path.join(directory, filename) if rollover_regex.match(filename): files.append((os.path.getmtime(filename), filename)) files.sort() if self.backup_count > 1: - return files[:-self.backup_count + 1] + return files[: -self.backup_count + 1] else: return files[:] @@ -980,10 +1089,7 @@ def perform_rollover(self, new_timestamp): if self.stream is not None: self.stream.close() - if ( - not self.timed_filename_for_current - and os.path.exists(self._filename) - ): + if not self.timed_filename_for_current and os.path.exists(self._filename): filename = self.generate_timed_filename(self._timestamp) os.rename(self._filename, filename) @@ -995,7 +1101,7 @@ def perform_rollover(self, new_timestamp): self._filename = self.generate_timed_filename(new_timestamp) self._timestamp = new_timestamp - self._open('w') + self._open("w") def emit(self, record): msg = self.format(record) @@ -1022,10 +1128,17 @@ def my_test(): assert logger.has_warning('A warning') ... 
""" + default_format_string = TEST_FORMAT_STRING - def __init__(self, level=NOTSET, format_string=None, filter=None, - bubble=False, force_heavy_init=False): + def __init__( + self, + level=NOTSET, + format_string=None, + filter=None, + bubble=False, + force_heavy_init=False, + ): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) #: captures the :class:`LogRecord`\s as instances @@ -1051,9 +1164,9 @@ def emit(self, record): @property def formatted_records(self): """Captures the formatted log records as unicode strings.""" - if (len(self._formatted_record_cache) != len(self.records) or - any(r1 != r2 for r1, r2 in - zip(self.records, self._formatted_record_cache))): + if len(self._formatted_record_cache) != len(self.records) or any( + r1 != r2 for r1, r2 in zip(self.records, self._formatted_record_cache) + ): self._formatted_records = [self.format(r) for r in self.records] self._formatted_record_cache = list(self.records) return self._formatted_records @@ -1098,7 +1211,7 @@ def has_critical(self, *args, **kwargs): See :ref:`probe-log-records` for more information. """ - kwargs['level'] = CRITICAL + kwargs["level"] = CRITICAL return self._test_for(*args, **kwargs) def has_error(self, *args, **kwargs): @@ -1106,7 +1219,7 @@ def has_error(self, *args, **kwargs): See :ref:`probe-log-records` for more information. """ - kwargs['level'] = ERROR + kwargs["level"] = ERROR return self._test_for(*args, **kwargs) def has_warning(self, *args, **kwargs): @@ -1114,7 +1227,7 @@ def has_warning(self, *args, **kwargs): See :ref:`probe-log-records` for more information. """ - kwargs['level'] = WARNING + kwargs["level"] = WARNING return self._test_for(*args, **kwargs) def has_notice(self, *args, **kwargs): @@ -1122,7 +1235,7 @@ def has_notice(self, *args, **kwargs): See :ref:`probe-log-records` for more information. 
""" - kwargs['level'] = NOTICE + kwargs["level"] = NOTICE return self._test_for(*args, **kwargs) def has_info(self, *args, **kwargs): @@ -1130,7 +1243,7 @@ def has_info(self, *args, **kwargs): See :ref:`probe-log-records` for more information. """ - kwargs['level'] = INFO + kwargs["level"] = INFO return self._test_for(*args, **kwargs) def has_debug(self, *args, **kwargs): @@ -1138,7 +1251,7 @@ def has_debug(self, *args, **kwargs): See :ref:`probe-log-records` for more information. """ - kwargs['level'] = DEBUG + kwargs["level"] = DEBUG return self._test_for(*args, **kwargs) def has_trace(self, *args, **kwargs): @@ -1146,7 +1259,7 @@ def has_trace(self, *args, **kwargs): See :ref:`probe-log-records` for more information. """ - kwargs['level'] = TRACE + kwargs["level"] = TRACE return self._test_for(*args, **kwargs) def _test_for(self, message=None, channel=None, level=None): @@ -1157,6 +1270,7 @@ def _match(needle, haystack): if needle == haystack: return True return False + for record in self.records: if level is not None and record.level != level: continue @@ -1168,8 +1282,7 @@ def _match(needle, haystack): return False -class MailHandler(Handler, StringFormatterHandlerMixin, - LimitingHandlerMixin): +class MailHandler(Handler, StringFormatterHandlerMixin, LimitingHandlerMixin): """A handler that sends error mails. The format string used by this handler are the contents of the mail plus the headers. This is handy if you want to use a custom subject or ``X-`` header:: @@ -1232,9 +1345,10 @@ class MailHandler(Handler, StringFormatterHandlerMixin, .. versionchanged:: 1.0 `secure` can now be a dictionary or boolean in addition to to a tuple. """ + default_format_string = MAIL_FORMAT_STRING default_related_format_string = MAIL_RELATED_FORMAT_STRING - default_subject = u('Server Error in Application') + default_subject = u("Server Error in Application") #: the maximum number of record hashes in the cache for the limiting #: feature. 
Afterwards, record_cache_prune percent of the oldest @@ -1244,11 +1358,23 @@ class MailHandler(Handler, StringFormatterHandlerMixin, #: the number of items to prune on a cache overflow in percent. record_cache_prune = 0.333 - def __init__(self, from_addr, recipients, subject=None, - server_addr=None, credentials=None, secure=None, - record_limit=None, record_delta=None, level=NOTSET, - format_string=None, related_format_string=None, - filter=None, bubble=False, starttls=True): + def __init__( + self, + from_addr, + recipients, + subject=None, + server_addr=None, + credentials=None, + secure=None, + record_limit=None, + record_delta=None, + level=NOTSET, + format_string=None, + related_format_string=None, + filter=None, + bubble=False, + starttls=True, + ): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) LimitingHandlerMixin.__init__(self, record_limit, record_delta) @@ -1274,8 +1400,10 @@ def _set_related_format_string(self, value): self.related_formatter = None else: self.related_formatter = self.formatter_class(value) - related_format_string = property(_get_related_format_string, - _set_related_format_string) + + related_format_string = property( + _get_related_format_string, _set_related_format_string + ) del _get_related_format_string, _set_related_format_string def get_recipients(self, record): @@ -1291,35 +1419,38 @@ def message_from_record(self, record, suppressed): """ from email.header import Header from email.message import Message + msg = Message() - msg.set_charset('utf-8') + msg.set_charset("utf-8") lineiter = iter(self.format(record).splitlines()) for line in lineiter: if not line: break - h, v = line.split(':', 1) + h, v = line.split(":", 1) # We could probably just encode everything. For the moment encode # only what really needed to avoid breaking a couple of tests. 
try: - v.encode('ascii') + v.encode("ascii") except UnicodeEncodeError: - msg[h.strip()] = Header(v.strip(), 'utf-8') + msg[h.strip()] = Header(v.strip(), "utf-8") else: msg[h.strip()] = v.strip() - msg.replace_header('Content-Transfer-Encoding', '8bit') + msg.replace_header("Content-Transfer-Encoding", "8bit") - body = '\r\n'.join(lineiter) + body = "\r\n".join(lineiter) if suppressed: - body += ('\r\n\r\nThis message occurred additional %d ' - 'time(s) and was suppressed' % suppressed) + body += ( + "\r\n\r\nThis message occurred additional %d " + "time(s) and was suppressed" % suppressed + ) # inconsistency in Python 2.5 # other versions correctly return msg.get_payload() as str if sys.version_info < (2, 6) and isinstance(body, unicode): - body = body.encode('utf-8') + body = body.encode("utf-8") - msg.set_payload(body, 'UTF-8') + msg.set_payload(body, "UTF-8") return msg def format_related_record(self, record): @@ -1334,24 +1465,28 @@ def generate_mail(self, record, suppressed=0): that were not send if the `record_limit` feature is active. 
""" from email.utils import formatdate + msg = self.message_from_record(record, suppressed) - msg['From'] = self.from_addr - msg['Date'] = formatdate() + msg["From"] = self.from_addr + msg["Date"] = formatdate() return msg def collapse_mails(self, mail, related, reason): - """When escaling or grouped mails are """ + """When escaling or grouped mails are""" if not related: return mail - if reason == 'group': - title = 'Other log records in the same group' + if reason == "group": + title = "Other log records in the same group" else: - title = 'Log records that led up to this one' - mail.set_payload('{}\r\n\r\n\r\n{}:\r\n\r\n{}'.format( - mail.get_payload(), - title, - '\r\n\r\n'.join(body.rstrip() for body in related) - ), 'UTF-8') + title = "Log records that led up to this one" + mail.set_payload( + "{}\r\n\r\n\r\n{}:\r\n\r\n{}".format( + mail.get_payload(), + title, + "\r\n\r\n".join(body.rstrip() for body in related), + ), + "UTF-8", + ) return mail def get_connection(self): @@ -1359,8 +1494,9 @@ def get_connection(self): each sent mail. """ from smtplib import SMTP, SMTP_PORT, SMTP_SSL, SMTP_SSL_PORT + if self.server_addr is None: - host = '127.0.0.1' + host = "127.0.0.1" port = self.secure and SMTP_SSL_PORT or SMTP_PORT else: try: @@ -1382,8 +1518,8 @@ def get_connection(self): # - secure=() equivalent to secure=True for backwards compatibility. # - secure=False equivalent to secure=None to disable. 
if isinstance(self.secure, collections_abc.Mapping): - keyfile = self.secure.get('keyfile', None) - certfile = self.secure.get('certfile', None) + keyfile = self.secure.get("keyfile", None) + certfile = self.secure.get("certfile", None) elif isinstance(self.secure, collections_abc.Iterable): # Allow empty tuple for backwards compatibility if len(self.secure) == 0: @@ -1441,17 +1577,18 @@ def emit(self, record): suppressed, allow_delivery = self.check_delivery(record) if not allow_delivery: return - self.deliver(self.generate_mail(record, suppressed), - self.get_recipients(record)) + self.deliver( + self.generate_mail(record, suppressed), self.get_recipients(record) + ) def emit_batch(self, records, reason): - if reason not in ('escalation', 'group'): + if reason not in ("escalation", "group"): raise RuntimeError("reason must be either 'escalation' or 'group'") records = list(records) if not records: return - trigger = records.pop(reason == 'escalation' and -1 or 0) + trigger = records.pop(reason == "escalation" and -1 or 0) suppressed = 0 if self.record_limit is not None: suppressed, allow_delivery = self.check_delivery(trigger) @@ -1459,11 +1596,12 @@ def emit_batch(self, records, reason): return trigger_mail = self.generate_mail(trigger, suppressed) - related = [self.format_related_record(record) - for record in records] + related = [self.format_related_record(record) for record in records] - self.deliver(self.collapse_mails(trigger_mail, related, reason), - self.get_recipients(trigger)) + self.deliver( + self.collapse_mails(trigger_mail, related, reason), + self.get_recipients(trigger), + ) class GMailHandler(MailHandler): @@ -1480,96 +1618,109 @@ class GMailHandler(MailHandler): def __init__(self, account_id, password, recipients, **kw): super().__init__( - account_id, recipients, secure=True, + account_id, + recipients, + secure=True, server_addr=("smtp.gmail.com", 587), - credentials=(account_id, password), **kw) + credentials=(account_id, password), + **kw, 
+ ) class SyslogHandler(Handler, StringFormatterHandlerMixin): """A handler class which sends formatted logging records to a syslog server. By default it will send to it via unix socket. """ + default_format_string = SYSLOG_FORMAT_STRING # priorities - LOG_EMERG = 0 # system is unusable - LOG_ALERT = 1 # action must be taken immediately - LOG_CRIT = 2 # critical conditions - LOG_ERR = 3 # error conditions - LOG_WARNING = 4 # warning conditions - LOG_NOTICE = 5 # normal but significant condition - LOG_INFO = 6 # informational - LOG_DEBUG = 7 # debug-level messages + LOG_EMERG = 0 # system is unusable + LOG_ALERT = 1 # action must be taken immediately + LOG_CRIT = 2 # critical conditions + LOG_ERR = 3 # error conditions + LOG_WARNING = 4 # warning conditions + LOG_NOTICE = 5 # normal but significant condition + LOG_INFO = 6 # informational + LOG_DEBUG = 7 # debug-level messages # facility codes - LOG_KERN = 0 # kernel messages - LOG_USER = 1 # random user-level messages - LOG_MAIL = 2 # mail system - LOG_DAEMON = 3 # system daemons - LOG_AUTH = 4 # security/authorization messages - LOG_SYSLOG = 5 # messages generated internally by syslogd - LOG_LPR = 6 # line printer subsystem - LOG_NEWS = 7 # network news subsystem - LOG_UUCP = 8 # UUCP subsystem - LOG_CRON = 9 # clock daemon - LOG_AUTHPRIV = 10 # security/authorization messages (private) - LOG_FTP = 11 # FTP daemon + LOG_KERN = 0 # kernel messages + LOG_USER = 1 # random user-level messages + LOG_MAIL = 2 # mail system + LOG_DAEMON = 3 # system daemons + LOG_AUTH = 4 # security/authorization messages + LOG_SYSLOG = 5 # messages generated internally by syslogd + LOG_LPR = 6 # line printer subsystem + LOG_NEWS = 7 # network news subsystem + LOG_UUCP = 8 # UUCP subsystem + LOG_CRON = 9 # clock daemon + LOG_AUTHPRIV = 10 # security/authorization messages (private) + LOG_FTP = 11 # FTP daemon # other codes through 15 reserved for system use - LOG_LOCAL0 = 16 # reserved for local use - LOG_LOCAL1 = 17 # reserved for 
local use - LOG_LOCAL2 = 18 # reserved for local use - LOG_LOCAL3 = 19 # reserved for local use - LOG_LOCAL4 = 20 # reserved for local use - LOG_LOCAL5 = 21 # reserved for local use - LOG_LOCAL6 = 22 # reserved for local use - LOG_LOCAL7 = 23 # reserved for local use + LOG_LOCAL0 = 16 # reserved for local use + LOG_LOCAL1 = 17 # reserved for local use + LOG_LOCAL2 = 18 # reserved for local use + LOG_LOCAL3 = 19 # reserved for local use + LOG_LOCAL4 = 20 # reserved for local use + LOG_LOCAL5 = 21 # reserved for local use + LOG_LOCAL6 = 22 # reserved for local use + LOG_LOCAL7 = 23 # reserved for local use facility_names = { - 'auth': LOG_AUTH, - 'authpriv': LOG_AUTHPRIV, - 'cron': LOG_CRON, - 'daemon': LOG_DAEMON, - 'ftp': LOG_FTP, - 'kern': LOG_KERN, - 'lpr': LOG_LPR, - 'mail': LOG_MAIL, - 'news': LOG_NEWS, - 'syslog': LOG_SYSLOG, - 'user': LOG_USER, - 'uucp': LOG_UUCP, - 'local0': LOG_LOCAL0, - 'local1': LOG_LOCAL1, - 'local2': LOG_LOCAL2, - 'local3': LOG_LOCAL3, - 'local4': LOG_LOCAL4, - 'local5': LOG_LOCAL5, - 'local6': LOG_LOCAL6, - 'local7': LOG_LOCAL7, + "auth": LOG_AUTH, + "authpriv": LOG_AUTHPRIV, + "cron": LOG_CRON, + "daemon": LOG_DAEMON, + "ftp": LOG_FTP, + "kern": LOG_KERN, + "lpr": LOG_LPR, + "mail": LOG_MAIL, + "news": LOG_NEWS, + "syslog": LOG_SYSLOG, + "user": LOG_USER, + "uucp": LOG_UUCP, + "local0": LOG_LOCAL0, + "local1": LOG_LOCAL1, + "local2": LOG_LOCAL2, + "local3": LOG_LOCAL3, + "local4": LOG_LOCAL4, + "local5": LOG_LOCAL5, + "local6": LOG_LOCAL6, + "local7": LOG_LOCAL7, } level_priority_map = { - DEBUG: LOG_DEBUG, - INFO: LOG_INFO, - NOTICE: LOG_NOTICE, - WARNING: LOG_WARNING, - ERROR: LOG_ERR, - CRITICAL: LOG_CRIT + DEBUG: LOG_DEBUG, + INFO: LOG_INFO, + NOTICE: LOG_NOTICE, + WARNING: LOG_WARNING, + ERROR: LOG_ERR, + CRITICAL: LOG_CRIT, } - def __init__(self, application_name=None, address=None, - facility='user', socktype=socket.SOCK_DGRAM, - level=NOTSET, format_string=None, filter=None, - bubble=False, record_delimiter=None): + def 
__init__( + self, + application_name=None, + address=None, + facility="user", + socktype=socket.SOCK_DGRAM, + level=NOTSET, + format_string=None, + filter=None, + bubble=False, + record_delimiter=None, + ): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) self.application_name = application_name if address is None: - if sys.platform == 'darwin': - address = '/var/run/syslog' + if sys.platform == "darwin": + address = "/var/run/syslog" else: - address = '/dev/log' + address = "/dev/log" self.remote_address = self.address = address self.facility = facility @@ -1578,17 +1729,19 @@ def __init__(self, application_name=None, address=None, if isinstance(address, string_types): self._connect_unixsocket() self.enveloper = self.unix_envelope - default_delimiter = '\x00' + default_delimiter = "\x00" else: self._connect_netsocket() self.enveloper = self.net_envelope - default_delimiter = '\n' + default_delimiter = "\n" - self.record_delimiter = default_delimiter \ - if record_delimiter is None else record_delimiter + self.record_delimiter = ( + default_delimiter if record_delimiter is None else record_delimiter + ) self.connection_exception = getattr( - __builtins__, 'BrokenPipeError', socket.error) + __builtins__, "BrokenPipeError", socket.error + ) def _connect_unixsocket(self): self.unixsocket = True @@ -1609,20 +1762,19 @@ def _connect_netsocket(self): def encode_priority(self, record): facility = self.facility_names[self.facility] - priority = self.level_priority_map.get(record.level, - self.LOG_WARNING) + priority = self.level_priority_map.get(record.level, self.LOG_WARNING) return (facility << 3) | priority def wrap_segments(self, record, before): msg = self.format(record) segments = [segment for segment in msg.split(self.record_delimiter)] - return (before + segment + self.record_delimiter - for segment in segments) + return (before + segment + self.record_delimiter for segment in segments) def unix_envelope(self, 
record): - before = '<{}>{}'.format( + before = "<{}>{}".format( self.encode_priority(record), - self.application_name + ':' if self.application_name else '') + self.application_name + ":" if self.application_name else "", + ) return self.wrap_segments(record, before) def net_envelope(self, record): @@ -1630,19 +1782,22 @@ def net_envelope(self, record): try: format_string = self.format_string application_name = self.application_name - if not application_name and record.channel and \ - '{record.channel}: ' in format_string: - self.format_string = format_string.replace( - '{record.channel}: ', '') + if ( + not application_name + and record.channel + and "{record.channel}: " in format_string + ): + self.format_string = format_string.replace("{record.channel}: ", "") self.application_name = record.channel # RFC 5424: version timestamp hostname app-name procid # msgid structured-data message - before = '<{}>1 {}Z {} {} {} - - '.format( + before = "<{}>1 {}Z {} {} {} - - ".format( self.encode_priority(record), record.time.isoformat(), socket.gethostname(), - self.application_name if self.application_name else '-', - record.process) + self.application_name if self.application_name else "-", + record.process, + ) return self.wrap_segments(record, before) finally: self.format_string = format_string @@ -1650,7 +1805,7 @@ def net_envelope(self, record): def emit(self, record): for segment in self.enveloper(record): - self.send_to_socket(segment.encode('utf-8')) + self.send_to_socket(segment.encode("utf-8")) def send_to_socket(self, data): if self.unixsocket: @@ -1675,44 +1830,53 @@ def close(self): class NTEventLogHandler(Handler, StringFormatterHandlerMixin): """A handler that sends to the NT event log system.""" + dllname = None default_format_string = NTLOG_FORMAT_STRING - def __init__(self, application_name, log_type='Application', - level=NOTSET, format_string=None, filter=None, - bubble=False): + def __init__( + self, + application_name, + log_type="Application", + 
level=NOTSET, + format_string=None, + filter=None, + bubble=False, + ): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) - if os.name != 'nt': - raise RuntimeError('NTLogEventLogHandler requires a Windows ' - 'operating system.') + if os.name != "nt": + raise RuntimeError( + "NTLogEventLogHandler requires a Windows " "operating system." + ) try: import win32evtlog import win32evtlogutil except ImportError: - raise RuntimeError('The pywin32 library is required ' - 'for the NTEventLogHandler.') + raise RuntimeError( + "The pywin32 library is required " "for the NTEventLogHandler." + ) self.application_name = application_name self._welu = win32evtlogutil dllname = self.dllname if not dllname: - dllname = os.path.join(os.path.dirname(self._welu.__file__), - '../win32service.pyd') + dllname = os.path.join( + os.path.dirname(self._welu.__file__), "../win32service.pyd" + ) self.log_type = log_type - self._welu.AddSourceToRegistry(self.application_name, dllname, - log_type) + self._welu.AddSourceToRegistry(self.application_name, dllname, log_type) self._default_type = win32evtlog.EVENTLOG_INFORMATION_TYPE self._type_map = { - DEBUG: win32evtlog.EVENTLOG_INFORMATION_TYPE, - INFO: win32evtlog.EVENTLOG_INFORMATION_TYPE, - NOTICE: win32evtlog.EVENTLOG_INFORMATION_TYPE, - WARNING: win32evtlog.EVENTLOG_WARNING_TYPE, - ERROR: win32evtlog.EVENTLOG_ERROR_TYPE, - CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE + DEBUG: win32evtlog.EVENTLOG_INFORMATION_TYPE, + INFO: win32evtlog.EVENTLOG_INFORMATION_TYPE, + NOTICE: win32evtlog.EVENTLOG_INFORMATION_TYPE, + WARNING: win32evtlog.EVENTLOG_WARNING_TYPE, + ERROR: win32evtlog.EVENTLOG_ERROR_TYPE, + CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE, } def unregister_logger(self): @@ -1720,8 +1884,7 @@ def unregister_logger(self): this, the log viewer will no longer be able to provide any information about the message. 
""" - self._welu.RemoveSourceFromRegistry(self.application_name, - self.log_type) + self._welu.RemoveSourceFromRegistry(self.application_name, self.log_type) def get_event_type(self, record): return self._type_map.get(record.level, self._default_type) @@ -1742,8 +1905,9 @@ def emit(self, record): id = self.get_message_id(record) cat = self.get_event_category(record) type = self.get_event_type(record) - self._welu.ReportEvent(self.application_name, id, cat, type, - [self.format(record)]) + self._welu.ReportEvent( + self.application_name, id, cat, type, [self.format(record)] + ) class FingersCrossedHandler(Handler): @@ -1814,11 +1978,18 @@ def application(environ, start_response): #: ``'escalation'``. #: #: .. versionadded:: 0.3 - batch_emit_reason = 'escalation' - - def __init__(self, handler, action_level=ERROR, buffer_size=0, - pull_information=True, reset=False, filter=None, - bubble=False): + batch_emit_reason = "escalation" + + def __init__( + self, + handler, + action_level=ERROR, + buffer_size=0, + pull_information=True, + reset=False, + filter=None, + bubble=False, + ): Handler.__init__(self, NOTSET, filter, bubble) self.lock = new_fine_grained_lock() self._level = action_level @@ -1854,8 +2025,7 @@ def enqueue(self, record): self.buffered_records.append(record) if self._buffer_full: self.buffered_records.popleft() - elif (self.buffer_size and - len(self.buffered_records) >= self.buffer_size): + elif self.buffer_size and len(self.buffered_records) >= self.buffer_size: self._buffer_full = True return record.level >= self._level return False @@ -1863,7 +2033,7 @@ def enqueue(self, record): def rollover(self, record): if self._handler is None: self._handler = self._handler_factory(record, self) - self._handler.emit_batch(iter(self.buffered_records), 'escalation') + self._handler.emit_batch(iter(self.buffered_records), "escalation") self.buffered_records.clear() self._action_triggered = not self._reset @@ -1906,8 +2076,8 @@ class GroupHandler(WrapperHandler): .. 
versionadded:: 0.3 """ - _direct_attrs = frozenset(['handler', 'pull_information', - 'buffered_records']) + + _direct_attrs = frozenset(["handler", "pull_information", "buffered_records"]) def __init__(self, handler, pull_information=True): WrapperHandler.__init__(self, handler) @@ -1915,7 +2085,7 @@ def __init__(self, handler, pull_information=True): self.buffered_records = [] def rollover(self): - self.handler.emit_batch(self.buffered_records, 'group') + self.handler.emit_batch(self.buffered_records, "group") self.buffered_records = [] def pop_application(self): diff --git a/logbook/helpers.py b/logbook/helpers.py index d7b7b03..daccfd9 100644 --- a/logbook/helpers.py +++ b/logbook/helpers.py @@ -32,11 +32,14 @@ if PY2: from cStringIO import StringIO + iteritems = dict.iteritems from itertools import izip as zip + xrange = _builtins.xrange else: from io import StringIO + zip = _builtins.zip xrange = range iteritems = dict.items @@ -44,8 +47,10 @@ _IDENTITY = lambda obj: obj if PY2: + def u(s): return unicode(s, "unicode_escape") + else: u = _IDENTITY @@ -63,11 +68,16 @@ def u(s): if PY2: # Yucky, but apparently that's the only way to do this - exec(""" + exec( + """ def reraise(tp, value, tb=None): raise tp, value, tb -""", locals(), globals()) +""", + locals(), + globals(), + ) else: + def reraise(tp, value, tb=None): if value.__traceback__ is not tb: raise value.with_traceback(tb) @@ -78,29 +88,31 @@ def reraise(tp, value, tb=None): # some libraries (like the python xmlrpclib modules) use this _iso8601_re = re.compile( # date - r'(\d{4})(?:-?(\d{2})(?:-?(\d{2}))?)?' + r"(\d{4})(?:-?(\d{2})(?:-?(\d{2}))?)?" 
# time - r'(?:T(\d{2}):(\d{2})(?::(\d{2}(?:\.\d+)?))?(Z|[+-]\d{2}:\d{2})?)?$' + r"(?:T(\d{2}):(\d{2})(?::(\d{2}(?:\.\d+)?))?(Z|[+-]\d{2}:\d{2})?)?$" ) _missing = object() if PY2: + def b(x): return x def _is_text_stream(x): return True + else: import io def b(x): - return x.encode('ascii') + return x.encode("ascii") def _is_text_stream(stream): return isinstance(stream, io.TextIOBase) can_rename_open_file = False -if os.name == 'nt': +if os.name == "nt": try: import ctypes @@ -119,8 +131,9 @@ def _rename(src, dst): retry = 0 rv = False while not rv and retry < 100: - rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING | - _MOVEFILE_WRITE_THROUGH) + rv = _MoveFileEx( + src, dst, _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH + ) if not rv: time.sleep(0.001) retry += 1 @@ -134,16 +147,21 @@ def _rename(src, dst): can_rename_open_file = True def _rename_atomic(src, dst): - ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, 'Logbook rename') + ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, "Logbook rename") if ta == -1: return False try: retry = 0 rv = False while not rv and retry < 100: - rv = _MoveFileTransacted(src, dst, None, None, - _MOVEFILE_REPLACE_EXISTING | - _MOVEFILE_WRITE_THROUGH, ta) + rv = _MoveFileTransacted( + src, + dst, + None, + None, + _MOVEFILE_REPLACE_EXISTING | _MOVEFILE_WRITE_THROUGH, + ta, + ) if rv: rv = _CommitTransaction(ta) break @@ -153,7 +171,9 @@ def _rename_atomic(src, dst): return rv finally: _CloseHandle(ta) + except Exception: + def _rename(src, dst): return False @@ -171,13 +191,14 @@ def rename(src, dst): e = sys.exc_info()[1] if e.errno not in (errno.EEXIST, errno.EACCES): raise - old = f"{dst}-{random.randint(0, 2 ** 31 - 1):08x}" + old = f"{dst}-{random.randint(0, 2**31 - 1):08x}" os.rename(dst, old) os.rename(src, dst) try: os.unlink(old) except Exception: pass + else: rename = os.rename can_rename_open_file = True @@ -189,11 +210,12 @@ def to_safe_json(data): """Makes a data structure safe for JSON silently discarding 
invalid objects from nested structures. This also converts dates. """ + def _convert(obj): if obj is None: return None elif PY2 and isinstance(obj, str): - return obj.decode('utf-8', 'replace') + return obj.decode("utf-8", "replace") elif isinstance(obj, _JSON_SIMPLE_TYPES): return obj elif isinstance(obj, datetime): @@ -211,6 +233,7 @@ def _convert(obj): key = u(key) rv[key] = _convert(value) return rv + return _convert(data) @@ -218,10 +241,10 @@ def format_iso8601(d=None): """Returns a date in iso8601 format.""" if d is None: d = datetime.utcnow() - rv = d.strftime('%Y-%m-%dT%H:%M:%S') + rv = d.strftime("%Y-%m-%dT%H:%M:%S") if d.microsecond: - rv += '.' + str(d.microsecond) - return rv + 'Z' + rv += "." + str(d.microsecond) + return rv + "Z" def parse_iso8601(value): @@ -230,7 +253,7 @@ def parse_iso8601(value): """ m = _iso8601_re.match(value) if m is None: - raise ValueError('not a valid iso8601 date value') + raise ValueError("not a valid iso8601 date value") groups = m.groups() args = [] @@ -240,19 +263,19 @@ def parse_iso8601(value): args.append(group) seconds = groups[-2] if seconds is not None: - if '.' in seconds: - sec, usec = seconds.split('.') + if "." 
in seconds: + sec, usec = seconds.split(".") args.append(int(sec)) - args.append(int(usec.ljust(6, '0'))) + args.append(int(usec.ljust(6, "0"))) else: args.append(int(seconds)) rv = datetime(*args) tz = groups[-1] - if tz and tz != 'Z': - args = [int(x) for x in tz[1:].split(':')] + if tz and tz != "Z": + args = [int(x) for x in tz[1:].split(":")] delta = timedelta(hours=args[0], minutes=args[1]) - if tz[0] == '+': + if tz[0] == "+": rv -= delta else: rv += delta @@ -262,7 +285,7 @@ def parse_iso8601(value): def get_application_name(): if not sys.argv or not sys.argv[0]: - return 'Python' + return "Python" return os.path.basename(sys.argv[0]).title() @@ -295,15 +318,20 @@ def is_unicode(x): return isinstance(x, unicode) return isinstance(x, str) + if PY2: - exec("""def with_metaclass(meta): + exec( + """def with_metaclass(meta): class _WithMetaclassBase(object): __metaclass__ = meta return _WithMetaclassBase -""") +""" + ) else: - exec("""def with_metaclass(meta): + exec( + """def with_metaclass(meta): class _WithMetaclassBase(object, metaclass=meta): pass return _WithMetaclassBase -""") +""" + ) diff --git a/logbook/more.py b/logbook/more.py index f09097d..503a0dc 100644 --- a/logbook/more.py +++ b/logbook/more.py @@ -30,7 +30,7 @@ import riemann_client.transport except ImportError: riemann_client = None - #from riemann_client.transport import TCPTransport, UDPTransport, BlankTransport + # from riemann_client.transport import TCPTransport, UDPTransport, BlankTransport if PY2: @@ -40,27 +40,25 @@ else: from urllib.parse import parse_qsl, urlencode -_ws_re = re.compile(r'(\s+)', re.UNICODE) -TWITTER_FORMAT_STRING = u( - '[{record.channel}] {record.level_name}: {record.message}') -TWITTER_ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token' -NEW_TWEET_URL = 'https://api.twitter.com/1/statuses/update.json' +_ws_re = re.compile(r"(\s+)", re.UNICODE) +TWITTER_FORMAT_STRING = u("[{record.channel}] {record.level_name}: {record.message}") +TWITTER_ACCESS_TOKEN_URL = 
"https://twitter.com/oauth/access_token" +NEW_TWEET_URL = "https://api.twitter.com/1/statuses/update.json" class CouchDBBackend(BackendBase): - """Implements a backend that writes into a CouchDB database. - """ + """Implements a backend that writes into a CouchDB database.""" + def setup_backend(self): from couchdb import Server - uri = self.options.pop('uri', u('')) + uri = self.options.pop("uri", u("")) couch = Server(uri) - db_name = self.options.pop('db') + db_name = self.options.pop("db") self.database = couch[db_name] def record_ticket(self, record, data, hash, app_id): - """Records a log record as ticket. - """ + """Records a log record as ticket.""" db = self.database ticket = record.to_dict() @@ -74,11 +72,11 @@ class TwitterFormatter(StringFormatter): """Works like the standard string formatter and is used by the :class:`TwitterHandler` unless changed. """ + max_length = 140 def format_exception(self, record): - return u('%s: %s') % (record.exception_shortname, - record.exception_message) + return u("%s: %s") % (record.exception_shortname, record.exception_message) def __call__(self, record, handler): formatted = StringFormatter.__call__(self, record, handler) @@ -88,10 +86,10 @@ def __call__(self, record, handler): length += len(piece) if length > self.max_length: if length - len(piece) < self.max_length: - rv.append(u('ā€¦')) + rv.append(u("ā€¦")) break rv.append(piece) - return u('').join(rv) + return u("").join(rv) class TaggingLogger(RecordDispatcher): @@ -117,18 +115,19 @@ class TaggingLogger(RecordDispatcher): def __init__(self, name=None, tags=None): RecordDispatcher.__init__(self, name) # create a method for each tag named - for tag in (tags or ()): + for tag in tags or (): setattr(self, tag, partial(self.log, tag)) def log(self, tags, msg, *args, **kwargs): if isinstance(tags, string_types): tags = [tags] - exc_info = kwargs.pop('exc_info', None) - extra = kwargs.pop('extra', {}) - extra['tags'] = list(tags) - frame_correction = 
kwargs.pop('frame_correction', 0) - return self.make_record_and_handle(NOTSET, msg, args, kwargs, - exc_info, extra, frame_correction) + exc_info = kwargs.pop("exc_info", None) + extra = kwargs.pop("extra", {}) + extra["tags"] = list(tags) + frame_correction = kwargs.pop("frame_correction", 0) + return self.make_record_and_handle( + NOTSET, msg, args, kwargs, exc_info, extra, frame_correction + ) class TaggingHandler(Handler): @@ -150,10 +149,11 @@ def __init__(self, handlers, filter=None, bubble=False): assert isinstance(handlers, dict) self._handlers = { tag: isinstance(handler, Handler) and [handler] or handler - for (tag, handler) in iteritems(handlers)} + for (tag, handler) in iteritems(handlers) + } def emit(self, record): - for tag in record.extra.get('tags', ()): + for tag in record.extra.get("tags", ()): for handler in self._handlers.get(tag, ()): handler.handle(record) @@ -168,12 +168,21 @@ class TwitterHandler(Handler, StringFormatterHandlerMixin): pairs from application explicitly whitelisted at Twitter (`leaked secrets `_). """ + default_format_string = TWITTER_FORMAT_STRING formatter_class = TwitterFormatter - def __init__(self, consumer_key, consumer_secret, username, - password, level=NOTSET, format_string=None, filter=None, - bubble=False): + def __init__( + self, + consumer_key, + consumer_secret, + username, + password, + level=NOTSET, + format_string=None, + filter=None, + bubble=False, + ): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) self.consumer_key = consumer_key @@ -184,35 +193,37 @@ def __init__(self, consumer_key, consumer_secret, username, try: import oauth2 except ImportError: - raise RuntimeError('The python-oauth2 library is required for ' - 'the TwitterHandler.') + raise RuntimeError( + "The python-oauth2 library is required for " "the TwitterHandler." 
+ ) self._oauth = oauth2 self._oauth_token = None self._oauth_token_secret = None - self._consumer = oauth2.Consumer(consumer_key, - consumer_secret) + self._consumer = oauth2.Consumer(consumer_key, consumer_secret) self._client = oauth2.Client(self._consumer) def get_oauth_token(self): """Returns the oauth access token.""" if self._oauth_token is None: resp, content = self._client.request( - TWITTER_ACCESS_TOKEN_URL + '?', 'POST', - body=urlencode({ - 'x_auth_username': self.username.encode('utf-8'), - 'x_auth_password': self.password.encode('utf-8'), - 'x_auth_mode': 'client_auth' - }), - headers={'Content-Type': 'application/x-www-form-urlencoded'} + TWITTER_ACCESS_TOKEN_URL + "?", + "POST", + body=urlencode( + { + "x_auth_username": self.username.encode("utf-8"), + "x_auth_password": self.password.encode("utf-8"), + "x_auth_mode": "client_auth", + } + ), + headers={"Content-Type": "application/x-www-form-urlencoded"}, ) - if resp['status'] != '200': - raise RuntimeError('unable to login to Twitter') + if resp["status"] != "200": + raise RuntimeError("unable to login to Twitter") data = dict(parse_qsl(content)) - self._oauth_token = data['oauth_token'] - self._oauth_token_secret = data['oauth_token_secret'] - return self._oauth.Token(self._oauth_token, - self._oauth_token_secret) + self._oauth_token = data["oauth_token"] + self._oauth_token_secret = data["oauth_token_secret"] + return self._oauth.Token(self._oauth_token, self._oauth_token_secret) def make_client(self): """Creates a new oauth client auth a new access token.""" @@ -222,10 +233,12 @@ def tweet(self, status): """Tweets a given status. 
Status must not exceed 140 chars.""" client = self.make_client() resp, content = client.request( - NEW_TWEET_URL, 'POST', - body=urlencode({'status': status.encode('utf-8')}), - headers={'Content-Type': 'application/x-www-form-urlencoded'}) - return resp['status'] == '200' + NEW_TWEET_URL, + "POST", + body=urlencode({"status": status.encode("utf-8")}), + headers={"Content-Type": "application/x-www-form-urlencoded"}, + ) + return resp["status"] == "200" def emit(self, record): self.tweet(self.format(record)) @@ -238,9 +251,15 @@ class SlackHandler(Handler, StringFormatterHandlerMixin): slacker library has to be installed. """ - def __init__(self, api_token, channel, level=NOTSET, format_string=None, filter=None, - bubble=False): - + def __init__( + self, + api_token, + channel, + level=NOTSET, + format_string=None, + filter=None, + bubble=False, + ): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) self.api_token = api_token @@ -248,8 +267,9 @@ def __init__(self, api_token, channel, level=NOTSET, format_string=None, filter= try: from slacker import Slacker except ImportError: - raise RuntimeError('The slacker library is required for ' - 'the SlackHandler.') + raise RuntimeError( + "The slacker library is required for " "the SlackHandler." + ) self.channel = channel self.slack = Slacker(api_token) @@ -267,8 +287,9 @@ def __init__(self, template): try: from jinja2 import Template except ImportError: - raise RuntimeError('The jinja2 library is required for ' - 'the JinjaFormatter.') + raise RuntimeError( + "The jinja2 library is required for " "the JinjaFormatter." + ) self.template = Template(template) def __call__(self, record, handler): @@ -295,9 +316,15 @@ class ExternalApplicationHandler(Handler): .. 
versionadded:: 0.3 """ - def __init__(self, arguments, stdin_format=None, - encoding='utf-8', level=NOTSET, filter=None, - bubble=False): + def __init__( + self, + arguments, + stdin_format=None, + encoding="utf-8", + level=NOTSET, + filter=None, + bubble=False, + ): Handler.__init__(self, level, filter, bubble) self.encoding = encoding self._arguments = list(arguments) @@ -305,14 +332,13 @@ def __init__(self, arguments, stdin_format=None, stdin_format = stdin_format self._stdin_format = stdin_format import subprocess + self._subprocess = subprocess def emit(self, record): - args = [arg.format(record=record) - for arg in self._arguments] + args = [arg.format(record=record) for arg in self._arguments] if self._stdin_format is not None: - stdin_data = (self._stdin_format.format(record=record) - .encode(self.encoding)) + stdin_data = self._stdin_format.format(record=record).encode(self.encoding) stdin = self._subprocess.PIPE else: stdin = None @@ -331,16 +357,15 @@ class ColorizingStreamHandlerMixin: .. _`colorama`: https://pypi.org/project/colorama """ + _use_color = None def force_color(self): - """Force colorizing the stream (`should_colorize` will return True) - """ + """Force colorizing the stream (`should_colorize` will return True)""" self._use_color = True def forbid_color(self): - """Forbid colorizing the stream (`should_colorize` will return False) - """ + """Forbid colorizing the stream (`should_colorize` will return False)""" self._use_color = False def should_colorize(self, record): @@ -349,23 +374,23 @@ def should_colorize(self, record): stream is a tty. If we are executing on Windows, colorama must be installed. 
""" - if os.name == 'nt': + if os.name == "nt": try: import colorama except ImportError: return False if self._use_color is not None: return self._use_color - isatty = getattr(self.stream, 'isatty', None) + isatty = getattr(self.stream, "isatty", None) return isatty and isatty() def get_color(self, record): """Returns the color for this record.""" if record.level >= ERROR: - return 'red' + return "red" elif record.level >= NOTICE: - return 'yellow' - return 'lightgray' + return "yellow" + return "lightgray" def format(self, record): rv = super().format(record) @@ -387,6 +412,7 @@ class ColorizedStderrHandler(ColorizingStreamHandlerMixin, StderrHandler): .. _`colorama`: https://pypi.org/project/colorama """ + def __init__(self, *args, **kwargs): StderrHandler.__init__(self, *args, **kwargs) @@ -408,8 +434,13 @@ class FingersCrossedHandler(FingersCrossedHandlerBase): def __init__(self, *args, **kwargs): FingersCrossedHandlerBase.__init__(self, *args, **kwargs) from warnings import warn - warn(PendingDeprecationWarning('fingers crossed handler changed ' - 'location. It\'s now a core component of Logbook.')) + + warn( + PendingDeprecationWarning( + "fingers crossed handler changed " + "location. It's now a core component of Logbook." + ) + ) class ExceptionHandler(Handler, StringFormatterHandlerMixin): @@ -426,8 +457,10 @@ class ApplicationWarning(Exception): .. 
versionadded:: 0.3 """ - def __init__(self, exc_type, level=NOTSET, format_string=None, - filter=None, bubble=False): + + def __init__( + self, exc_type, level=NOTSET, format_string=None, filter=None, bubble=False + ): Handler.__init__(self, level, filter, bubble) StringFormatterHandlerMixin.__init__(self, format_string) self.exc_type = exc_type @@ -455,9 +488,10 @@ class DedupHandler(Handler): message repeated 2 times: foo message repeated 1 times: bar """ - def __init__(self, - format_string='message repeated {count} times: {message}', - *args, **kwargs): + + def __init__( + self, format_string="message repeated {count} times: {message}", *args, **kwargs + ): Handler.__init__(self, bubble=False, *args, **kwargs) self._format_string = format_string self.clear() @@ -491,8 +525,8 @@ def handle(self, record): def flush(self): for record in self._unique_ordered_records: record.message = self._format_string.format( - message=record.message, - count=self._message_to_count[record.message]) + message=record.message, count=self._message_to_count[record.message] + ) # record.dispatcher is the logger who created the message, # it's sometimes supressed (by logbook.info for example) if record.dispatcher is not None: @@ -509,15 +543,17 @@ class RiemannHandler(Handler): A handler that sends logs as events to Riemann. 
""" - def __init__(self, - host, - port, - message_type="tcp", - ttl=60, - flush_threshold=10, - bubble=False, - filter=None, - level=NOTSET): + def __init__( + self, + host, + port, + message_type="tcp", + ttl=60, + flush_threshold=10, + bubble=False, + filter=None, + level=NOTSET, + ): """ :param host: riemann host :param port: riemann port @@ -526,7 +562,9 @@ def __init__(self, :param flush_threshold: count of events after which we send to riemann """ if riemann_client is None: - raise NotImplementedError("The Riemann handler requires the riemann_client package") # pragma: no cover + raise NotImplementedError( + "The Riemann handler requires the riemann_client package" + ) # pragma: no cover Handler.__init__(self, level, filter, bubble) self.host = host self.port = port @@ -540,33 +578,37 @@ def __init__(self, elif message_type == "test": self.transport = riemann_client.transport.BlankTransport else: - msg = ("Currently supported message types for RiemannHandler are: {}. \ - {} is not supported." - .format(",".join(["tcp", "udp", "test"]), message_type)) + msg = "Currently supported message types for RiemannHandler are: {}. 
\ + {} is not supported.".format( + ",".join(["tcp", "udp", "test"]), message_type + ) raise RuntimeError(msg) def record_to_event(self, record): from time import time + tags = ["log", record.level_name] msg = str(record.exc_info[1]) if record.exc_info else record.msg channel_name = str(record.channel) if record.channel else "unknown" - if any([record.level_name == keywords - for keywords in ["ERROR", "EXCEPTION"]]): + if any([record.level_name == keywords for keywords in ["ERROR", "EXCEPTION"]]): state = "error" else: state = "ok" - return {"metric_f": 1.0, - "tags": tags, - "description": msg, - "time": int(time()), - "ttl": self.ttl, - "host": platform.node(), - "service": f"{channel_name}.{os.getpid()}", - "state": state - } + return { + "metric_f": 1.0, + "tags": tags, + "description": msg, + "time": int(time()), + "ttl": self.ttl, + "host": platform.node(), + "service": f"{channel_name}.{os.getpid()}", + "state": state, + } def _flush_events(self): - with riemann_client.client.QueuedClient(self.transport(self.host, self.port)) as cl: + with riemann_client.client.QueuedClient( + self.transport(self.host, self.port) + ) as cl: for event in self.queue: cl.event(**event) cl.flush() diff --git a/logbook/notifiers.py b/logbook/notifiers.py index ee738f3..52cc656 100644 --- a/logbook/notifiers.py +++ b/logbook/notifiers.py @@ -22,13 +22,12 @@ from urllib.parse import urlencode -def create_notification_handler(application_name=None, level=NOTSET, - icon=None): +def create_notification_handler(application_name=None, level=NOTSET, icon=None): """Creates a handler perfectly fit the current platform. On Linux systems this creates a :class:`LibNotifyHandler`, on OS X systems it will create a :class:`GrowlHandler`. 
""" - if sys.platform == 'darwin': + if sys.platform == "darwin": return GrowlHandler(application_name, level=level, icon=icon) return LibNotifyHandler(application_name, level=level, icon=icon) @@ -36,8 +35,15 @@ def create_notification_handler(application_name=None, level=NOTSET, class NotificationBaseHandler(Handler, LimitingHandlerMixin): """Baseclass for notification handlers.""" - def __init__(self, application_name=None, record_limit=None, - record_delta=None, level=NOTSET, filter=None, bubble=False): + def __init__( + self, + application_name=None, + record_limit=None, + record_delta=None, + level=NOTSET, + filter=None, + bubble=False, + ): Handler.__init__(self, level, filter, bubble) LimitingHandlerMixin.__init__(self, record_limit, record_delta) if application_name is None: @@ -46,7 +52,7 @@ def __init__(self, application_name=None, record_limit=None, def make_title(self, record): """Called to get the title from the record.""" - return u('%s: %s') % (record.channel, record.level_name.title()) + return u("%s: %s") % (record.channel, record.level_name.title()) def make_text(self, record): """Called to get the text of the record.""" @@ -58,29 +64,42 @@ class GrowlHandler(NotificationBaseHandler): py-Growl are installed. 
""" - def __init__(self, application_name=None, icon=None, host=None, - password=None, record_limit=None, record_delta=None, - level=NOTSET, filter=None, bubble=False): - NotificationBaseHandler.__init__(self, application_name, record_limit, - record_delta, level, filter, bubble) + def __init__( + self, + application_name=None, + icon=None, + host=None, + password=None, + record_limit=None, + record_delta=None, + level=NOTSET, + filter=None, + bubble=False, + ): + NotificationBaseHandler.__init__( + self, application_name, record_limit, record_delta, level, filter, bubble + ) # growl is using the deprecated md5 module, but we really don't need # to see that deprecation warning from warnings import filterwarnings - filterwarnings(module='Growl', category=DeprecationWarning, - action='ignore') + + filterwarnings(module="Growl", category=DeprecationWarning, action="ignore") try: import Growl + self._growl = Growl except ImportError: - raise RuntimeError('The growl module is not available. You have ' - 'to install either growl-py or py-Growl to ' - 'use the GrowlHandler.') + raise RuntimeError( + "The growl module is not available. You have " + "to install either growl-py or py-Growl to " + "use the GrowlHandler." 
+ ) if icon is not None: if not os.path.isfile(icon): - raise OSError('Filename to an icon expected.') + raise OSError("Filename to an icon expected.") icon = self._growl.Image.imageFromPath(icon) else: try: @@ -91,10 +110,17 @@ def __init__(self, application_name=None, icon=None, host=None, self._notifier = self._growl.GrowlNotifier( applicationName=self.application_name, applicationIcon=icon, - notifications=['Notset', 'Debug', 'Info', 'Notice', 'Warning', - 'Error', 'Critical'], + notifications=[ + "Notset", + "Debug", + "Info", + "Notice", + "Warning", + "Error", + "Critical", + ], hostname=host, - password=password + password=password, ) self._notifier.register() @@ -118,11 +144,13 @@ def get_priority(self, record): def emit(self, record): if not self.check_delivery(record)[1]: return - self._notifier.notify(record.level_name.title(), - self.make_title(record), - self.make_text(record), - sticky=self.is_sticky(record), - priority=self.get_priority(record)) + self._notifier.notify( + record.level_name.title(), + self.make_title(record), + self.make_text(record), + sticky=self.is_sticky(record), + priority=self.get_priority(record), + ) class LibNotifyHandler(NotificationBaseHandler): @@ -130,18 +158,29 @@ class LibNotifyHandler(NotificationBaseHandler): If `no_init` is set to `True` the initialization of libnotify is skipped. 
""" - def __init__(self, application_name=None, icon=None, no_init=False, - record_limit=None, record_delta=None, level=NOTSET, - filter=None, bubble=False): - NotificationBaseHandler.__init__(self, application_name, record_limit, - record_delta, level, filter, bubble) + def __init__( + self, + application_name=None, + icon=None, + no_init=False, + record_limit=None, + record_delta=None, + level=NOTSET, + filter=None, + bubble=False, + ): + NotificationBaseHandler.__init__( + self, application_name, record_limit, record_delta, level, filter, bubble + ) try: import pynotify + self._pynotify = pynotify except ImportError: - raise RuntimeError('The pynotify library is required for ' - 'the LibNotifyHandler.') + raise RuntimeError( + "The pynotify library is required for " "the LibNotifyHandler." + ) self.icon = icon if not no_init: @@ -153,7 +192,7 @@ def set_notifier_icon(self, notifier, icon): from gtk import gdk except ImportError: # TODO: raise a warning? - raise RuntimeError('The gtk.gdk module is required to set an icon.') + raise RuntimeError("The gtk.gdk module is required to set an icon.") if icon is not None: if not isinstance(icon, gdk.Pixbuf): @@ -182,8 +221,9 @@ def get_urgency(self, record): def emit(self, record): if not self.check_delivery(record)[1]: return - notifier = self._pynotify.Notification(self.make_title(record), - self.make_text(record)) + notifier = self._pynotify.Notification( + self.make_title(record), self.make_text(record) + ) notifier.set_urgency(self.get_urgency(record)) notifier.set_timeout(self.get_expires(record)) self.set_notifier_icon(notifier, self.icon) @@ -194,12 +234,22 @@ class BoxcarHandler(NotificationBaseHandler): """Sends notifications to boxcar.io. Can be forwarded to your iPhone or other compatible device. 
""" - api_url = 'https://boxcar.io/notifications/' - def __init__(self, email, password, record_limit=None, record_delta=None, - level=NOTSET, filter=None, bubble=False): - NotificationBaseHandler.__init__(self, None, record_limit, - record_delta, level, filter, bubble) + api_url = "https://boxcar.io/notifications/" + + def __init__( + self, + email, + password, + record_limit=None, + record_delta=None, + level=NOTSET, + filter=None, + bubble=False, + ): + NotificationBaseHandler.__init__( + self, None, record_limit, record_delta, level, filter, bubble + ) self.email = email self.password = password @@ -210,19 +260,27 @@ def get_screen_name(self, record): def emit(self, record): if not self.check_delivery(record)[1]: return - body = urlencode({ - 'notification[from_screen_name]': - self.get_screen_name(record).encode('utf-8'), - 'notification[message]': - self.make_text(record).encode('utf-8'), - 'notification[from_remote_service_id]': str(int(time() * 100)) - }) - con = http_client.HTTPSConnection('boxcar.io') - con.request('POST', '/notifications/', headers={ - 'Authorization': 'Basic ' + - base64.b64encode((u('%s:%s') % (self.email, self.password)) - .encode('utf-8')).strip(), - }, body=body) + body = urlencode( + { + "notification[from_screen_name]": self.get_screen_name(record).encode( + "utf-8" + ), + "notification[message]": self.make_text(record).encode("utf-8"), + "notification[from_remote_service_id]": str(int(time() * 100)), + } + ) + con = http_client.HTTPSConnection("boxcar.io") + con.request( + "POST", + "/notifications/", + headers={ + "Authorization": "Basic " + + base64.b64encode( + (u("%s:%s") % (self.email, self.password)).encode("utf-8") + ).strip(), + }, + body=body, + ) con.close() @@ -231,18 +289,28 @@ class NotifoHandler(NotificationBaseHandler): iPhone, or other compatible device. 
""" - def __init__(self, application_name=None, username=None, secret=None, - record_limit=None, record_delta=None, level=NOTSET, - filter=None, bubble=False, hide_level=False): + def __init__( + self, + application_name=None, + username=None, + secret=None, + record_limit=None, + record_delta=None, + level=NOTSET, + filter=None, + bubble=False, + hide_level=False, + ): try: import notifo except ImportError: raise RuntimeError( - 'The notifo module is not available. You have ' - 'to install notifo to use the NotifoHandler.' + "The notifo module is not available. You have " + "to install notifo to use the NotifoHandler." ) - NotificationBaseHandler.__init__(self, None, record_limit, - record_delta, level, filter, bubble) + NotificationBaseHandler.__init__( + self, None, record_limit, record_delta, level, filter, bubble + ) self._notifo = notifo self.application_name = application_name self.username = username @@ -250,15 +318,20 @@ def __init__(self, application_name=None, username=None, secret=None, self.hide_level = hide_level def emit(self, record): - if self.hide_level: _level_name = None else: _level_name = self.level_name - self._notifo.send_notification(self.username, self.secret, None, - record.message, self.application_name, - _level_name, None) + self._notifo.send_notification( + self.username, + self.secret, + None, + record.message, + self.application_name, + _level_name, + None, + ) class PushoverHandler(NotificationBaseHandler): @@ -267,13 +340,23 @@ class PushoverHandler(NotificationBaseHandler): or 1, it is set to 0 automatically. 
""" - def __init__(self, application_name=None, apikey=None, userkey=None, - device=None, priority=0, sound=None, record_limit=None, - record_delta=None, level=NOTSET, filter=None, bubble=False, - max_title_len=100, max_message_len=512): - - super().__init__(None, record_limit, record_delta, - level, filter, bubble) + def __init__( + self, + application_name=None, + apikey=None, + userkey=None, + device=None, + priority=0, + sound=None, + record_limit=None, + record_delta=None, + level=NOTSET, + filter=None, + bubble=False, + max_title_len=100, + max_message_len=512, + ): + super().__init__(None, record_limit, record_delta, level, filter, bubble) self.application_name = application_name self.apikey = apikey @@ -295,7 +378,7 @@ def __init__(self, application_name=None, apikey=None, userkey=None, def _crop(self, msg, max_len): if max_len is not None and max_len > 0 and len(msg) > max_len: - return f"{msg[:max_len-3]}..." + return f"{msg[: max_len - 3]}..." else: return msg @@ -303,20 +386,20 @@ def emit(self, record): message = self._crop(record.message, self.max_message_len) body_dict = { - 'token': self.apikey, - 'user': self.userkey, - 'message': message, - 'priority': self.priority + "token": self.apikey, + "user": self.userkey, + "message": message, + "priority": self.priority, } if self.title is not None: - body_dict['title'] = self.title + body_dict["title"] = self.title if self.device is not None: - body_dict['device'] = self.device + body_dict["device"] = self.device if self.sound is not None: - body_dict['sound'] = self.sound + body_dict["sound"] = self.sound body = urlencode(body_dict) - con = http_client.HTTPSConnection('api.pushover.net') - con.request('POST', '/1/messages.json', body=body) + con = http_client.HTTPSConnection("api.pushover.net") + con.request("POST", "/1/messages.json", body=body) con.close() diff --git a/logbook/queues.py b/logbook/queues.py index 2d3a0ae..b68cfcc 100644 --- a/logbook/queues.py +++ b/logbook/queues.py @@ -44,25 +44,36 
@@ class RedisHandler(Handler): More info about the default buffer size: wp.me/p3tYJu-3b """ - def __init__(self, host='127.0.0.1', port=6379, key='redis', - extra_fields=None, flush_threshold=128, flush_time=1, - level=NOTSET, filter=None, password=False, bubble=True, - context=None, push_method='rpush'): + + def __init__( + self, + host="127.0.0.1", + port=6379, + key="redis", + extra_fields=None, + flush_threshold=128, + flush_time=1, + level=NOTSET, + filter=None, + password=False, + bubble=True, + context=None, + push_method="rpush", + ): Handler.__init__(self, level, filter, bubble) try: import redis from redis import ResponseError except ImportError: - raise RuntimeError('The redis library is required for ' - 'the RedisHandler') + raise RuntimeError("The redis library is required for " "the RedisHandler") - self.redis = redis.Redis(host=host, port=port, password=password, - decode_responses=True) + self.redis = redis.Redis( + host=host, port=port, password=password, decode_responses=True + ) try: self.redis.ping() except ResponseError: - raise ResponseError( - 'The password provided is apparently incorrect') + raise ResponseError("The password provided is apparently incorrect") self.key = key self.extra_fields = extra_fields or {} self.flush_threshold = flush_threshold @@ -72,15 +83,14 @@ def __init__(self, host='127.0.0.1', port=6379, key='redis', # Set up a thread that flushes the queue every specified seconds self._stop_event = threading.Event() - self._flushing_t = threading.Thread(target=self._flush_task, - args=(flush_time, - self._stop_event)) + self._flushing_t = threading.Thread( + target=self._flush_task, args=(flush_time, self._stop_event) + ) self._flushing_t.daemon = True self._flushing_t.start() def _flush_task(self, time, stop_event): - """Calls the method _flush_buffer every certain time. 
- """ + """Calls the method _flush_buffer every certain time.""" while not self._stop_event.isSet(): with self.lock: self._flush_buffer() @@ -113,10 +123,12 @@ def emit(self, record): Extra values are also appended to the message. """ with self.lock: - r = {"message": record.msg, - "host": platform.node(), - "level": record.level_name, - "time": record.time.isoformat()} + r = { + "message": record.msg, + "host": platform.node(), + "level": record.level_name, + "time": record.time.isoformat(), + } r.update(self.extra_fields) r.update(record.kwargs) self.queue.append(json.dumps(r)) @@ -154,22 +166,23 @@ class MessageQueueHandler(Handler): .. _kombu: https://docs.celeryq.dev/projects/kombu/en/latest/introduction.html """ - def __init__(self, uri=None, queue='logging', level=NOTSET, - filter=None, bubble=False): + def __init__( + self, uri=None, queue="logging", level=NOTSET, filter=None, bubble=False + ): Handler.__init__(self, level, filter, bubble) try: import kombu except ImportError: - raise RuntimeError('The kombu library is required for ' - 'the RabbitMQSubscriber.') + raise RuntimeError( + "The kombu library is required for " "the RabbitMQSubscriber." + ) if uri: connection = kombu.Connection(uri) self.queue = connection.SimpleQueue(queue) def export_record(self, record): - """Exports the record into a dictionary ready for JSON dumping. 
- """ + """Exports the record into a dictionary ready for JSON dumping.""" return record.to_dict(json_safe=True) def emit(self, record): @@ -200,14 +213,22 @@ class ZeroMQHandler(Handler): handler = ZeroMQHandler('tcp://127.0.0.1:5000') """ - def __init__(self, uri=None, level=NOTSET, filter=None, bubble=False, - context=None, multi=False): + def __init__( + self, + uri=None, + level=NOTSET, + filter=None, + bubble=False, + context=None, + multi=False, + ): Handler.__init__(self, level, filter, bubble) try: import zmq except ImportError: - raise RuntimeError('The pyzmq library is required for ' - 'the ZeroMQHandler.') + raise RuntimeError( + "The pyzmq library is required for " "the ZeroMQHandler." + ) #: the zero mq context self.context = context or zmq.Context() @@ -227,8 +248,7 @@ def export_record(self, record): return record.to_dict(json_safe=True) def emit(self, record): - self.socket.send(json.dumps( - self.export_record(record)).encode("utf-8")) + self.socket.send(json.dumps(self.export_record(record)).encode("utf-8")) def close(self, linger=-1): self.socket.close(linger) @@ -350,11 +370,12 @@ class MessageQueueSubscriber(SubscriberBase): controller.stop() """ - def __init__(self, uri=None, queue='logging'): + + def __init__(self, uri=None, queue="logging"): try: import kombu except ImportError: - raise RuntimeError('The kombu library is required.') + raise RuntimeError("The kombu library is required.") if uri: connection = kombu.Connection(uri) @@ -430,8 +451,9 @@ def __init__(self, uri=None, context=None, multi=False): try: import zmq except ImportError: - raise RuntimeError('The pyzmq library is required for ' - 'the ZeroMQSubscriber.') + raise RuntimeError( + "The pyzmq library is required for " "the ZeroMQSubscriber." 
+ ) self._zmq = zmq #: the zero mq context @@ -447,7 +469,7 @@ def __init__(self, uri=None, context=None, multi=False): self.socket = self.context.socket(zmq.SUB) if uri is not None: self.socket.connect(uri) - self.socket.setsockopt_unicode(zmq.SUBSCRIBE, u('')) + self.socket.setsockopt_unicode(zmq.SUBSCRIBE, u("")) def __del__(self): try: @@ -489,6 +511,7 @@ def _fix_261_mplog(): """ import logging import multiprocessing + logging.multiprocessing = multiprocessing @@ -555,6 +578,7 @@ class MultiProcessingSubscriber(SubscriberBase): def __init__(self, queue=None): if queue is None: from multiprocessing import Queue + queue = Queue(-1) self.queue = queue _fix_261_mplog() @@ -606,6 +630,7 @@ class TWHThreadController: queue and sends it to a handler. Both queue and handler are taken from the passed :class:`ThreadedWrapperHandler`. """ + class Command: stop = object() emit = object() @@ -626,7 +651,7 @@ def start(self): def stop(self): """Stops the task thread.""" if self.running: - self.wrapper_handler.queue.put_nowait((self.Command.stop, )) + self.wrapper_handler.queue.put_nowait((self.Command.stop,)) self._thread.join() self._thread = None @@ -638,7 +663,7 @@ def _target(self): self.running = False break elif command is self.Command.emit: - (record, ) = data + (record,) = data self.wrapper_handler.handler.emit(record) elif command is self.Command.emit_batch: record, reason = data @@ -661,7 +686,8 @@ class ThreadedWrapperHandler(WrapperHandler): >>> twh.handler.level_name 'WARNING' """ - _direct_attrs = frozenset(['handler', 'queue', 'controller']) + + _direct_attrs = frozenset(["handler", "queue", "controller"]) def __init__(self, handler, maxsize=0): WrapperHandler.__init__(self, handler) @@ -726,6 +752,7 @@ class SubscriberGroup(SubscriberBase): with target_handler: subscribers.dispatch_forever() """ + def __init__(self, subscribers=None, queue_limit=10): self.members = [] self.queue = ThreadQueue(queue_limit) diff --git a/logbook/ticketing.py 
b/logbook/ticketing.py index 0de613e..ed63c3b 100644 --- a/logbook/ticketing.py +++ b/logbook/ticketing.py @@ -32,7 +32,7 @@ def last_occurrence(self): if rv: return rv[0] - def get_occurrences(self, order_by='-time', limit=50, offset=0): + def get_occurrences(self, order_by="-time", limit=50, offset=0): """Returns the occurrences for this ticket.""" return self.db.get_occurrences(self.ticket_id, order_by, limit, offset) @@ -64,11 +64,11 @@ class Occurrence(LogRecord): """Represents an occurrence of a ticket.""" def __init__(self, db, row): - self.update_from_dict(json.loads(row['data'])) + self.update_from_dict(json.loads(row["data"])) self.db = db - self.time = row['time'] - self.ticket_id = row['ticket_id'] - self.occurrence_id = row['occurrence_id'] + self.time = row["time"] + self.ticket_id = row["ticket_id"] + self.occurrence_id = row["occurrence_id"] class BackendBase: @@ -90,8 +90,7 @@ def count_tickets(self): """Returns the number of tickets.""" raise NotImplementedError() - def get_tickets(self, order_by='-last_occurrence_time', - limit=50, offset=0): + def get_tickets(self, order_by="-last_occurrence_time", limit=50, offset=0): """Selects tickets from the database.""" raise NotImplementedError() @@ -107,7 +106,7 @@ def get_ticket(self, ticket_id): """Return a single ticket with all occurrences.""" raise NotImplementedError() - def get_occurrences(self, ticket, order_by='-time', limit=50, offset=0): + def get_occurrences(self, ticket, order_by="-time", limit=50, offset=0): """Selects occurrences from the database for a ticket.""" raise NotImplementedError() @@ -134,18 +133,20 @@ class SQLAlchemyBackend(BackendBase): def setup_backend(self): from sqlalchemy import MetaData, create_engine from sqlalchemy.orm import scoped_session, sessionmaker - engine_or_uri = self.options.pop('uri', None) - metadata = self.options.pop('metadata', None) - table_prefix = self.options.pop('table_prefix', 'logbook_') - if hasattr(engine_or_uri, 'execute'): + engine_or_uri = 
self.options.pop("uri", None) + metadata = self.options.pop("metadata", None) + table_prefix = self.options.pop("table_prefix", "logbook_") + + if hasattr(engine_or_uri, "execute"): self.engine = engine_or_uri else: # Pool recycle keeps connections from going stale, # which happens in MySQL Databases # Pool size is more custom for out stack - self.engine = create_engine(engine_or_uri, convert_unicode=True, - pool_recycle=360, pool_size=1000) + self.engine = create_engine( + engine_or_uri, convert_unicode=True, pool_recycle=360, pool_size=1000 + ) # Create session factory using session maker session = sessionmaker() @@ -162,7 +163,7 @@ def setup_backend(self): self.table_prefix = table_prefix self.metadata = metadata self.create_tables() - if self.options.get('autocreate_tables', True): + if self.options.get("autocreate_tables", True): self.metadata.create_all(bind=self.engine) def create_tables(self): @@ -172,33 +173,36 @@ def create_tables(self): import sqlalchemy as db def table(name, *args, **kwargs): - return db.Table(self.table_prefix + name, self.metadata, - *args, **kwargs) - self.tickets = table('tickets', - db.Column('ticket_id', db.Integer, - primary_key=True), - db.Column('record_hash', db.String(40), - unique=True), - db.Column('level', db.Integer), - db.Column('channel', db.String(120)), - db.Column('location', db.String(512)), - db.Column('module', db.String(256)), - db.Column('last_occurrence_time', db.DateTime), - db.Column('occurrence_count', db.Integer), - db.Column('solved', db.Boolean), - db.Column('app_id', db.String(80))) - self.occurrences = table('occurrences', - db.Column('occurrence_id', - db.Integer, primary_key=True), - db.Column('ticket_id', db.Integer, - db.ForeignKey(self.table_prefix + - 'tickets.ticket_id')), - db.Column('time', db.DateTime), - db.Column('data', db.Text), - db.Column('app_id', db.String(80))) + return db.Table(self.table_prefix + name, self.metadata, *args, **kwargs) + + self.tickets = table( + "tickets", + 
db.Column("ticket_id", db.Integer, primary_key=True), + db.Column("record_hash", db.String(40), unique=True), + db.Column("level", db.Integer), + db.Column("channel", db.String(120)), + db.Column("location", db.String(512)), + db.Column("module", db.String(256)), + db.Column("last_occurrence_time", db.DateTime), + db.Column("occurrence_count", db.Integer), + db.Column("solved", db.Boolean), + db.Column("app_id", db.String(80)), + ) + self.occurrences = table( + "occurrences", + db.Column("occurrence_id", db.Integer, primary_key=True), + db.Column( + "ticket_id", + db.Integer, + db.ForeignKey(self.table_prefix + "tickets.ticket_id"), + ), + db.Column("time", db.DateTime), + db.Column("data", db.Text), + db.Column("app_id", db.String(80)), + ) def _order(self, q, table, order_by): - if order_by[0] == '-': + if order_by[0] == "-": return q.order_by(table.c[order_by[1:]].desc()) return q.order_by(table.c[order_by]) @@ -210,30 +214,38 @@ def record_ticket(self, record, data, hash, app_id): q = self.tickets.select(self.tickets.c.record_hash == hash) row = s.execute(q).fetchone() if row is None: - row = s.execute(self.tickets.insert().values( - record_hash=hash, - level=record.level, - channel=record.channel or u(''), - location=u('%s:%d') % (record.filename, record.lineno), - module=record.module or u(''), - occurrence_count=0, - solved=False, - app_id=app_id - )) + row = s.execute( + self.tickets.insert().values( + record_hash=hash, + level=record.level, + channel=record.channel or u(""), + location=u("%s:%d") % (record.filename, record.lineno), + module=record.module or u(""), + occurrence_count=0, + solved=False, + app_id=app_id, + ) + ) ticket_id = row.inserted_primary_key[0] else: - ticket_id = row['ticket_id'] - s.execute(self.occurrences.insert() - .values(ticket_id=ticket_id, - time=record.time, - app_id=app_id, - data=json.dumps(data))) + ticket_id = row["ticket_id"] + s.execute( + self.occurrences.insert().values( + ticket_id=ticket_id, + time=record.time, + 
app_id=app_id, + data=json.dumps(data), + ) + ) s.execute( self.tickets.update() .where(self.tickets.c.ticket_id == ticket_id) - .values(occurrence_count=self.tickets.c.occurrence_count + 1, - last_occurrence_time=record.time, - solved=False)) + .values( + occurrence_count=self.tickets.c.occurrence_count + 1, + last_occurrence_time=record.time, + solved=False, + ) + ) s.commit() except Exception: s.rollback() @@ -245,41 +257,58 @@ def count_tickets(self): """Returns the number of tickets.""" return self.engine.execute(self.tickets.count()).fetchone()[0] - def get_tickets(self, order_by='-last_occurrence_time', limit=50, - offset=0): + def get_tickets(self, order_by="-last_occurrence_time", limit=50, offset=0): """Selects tickets from the database.""" - return [Ticket(self, row) for row in self.engine.execute( - self._order(self.tickets.select(), self.tickets, order_by) - .limit(limit).offset(offset)).fetchall()] + return [ + Ticket(self, row) + for row in self.engine.execute( + self._order(self.tickets.select(), self.tickets, order_by) + .limit(limit) + .offset(offset) + ).fetchall() + ] def solve_ticket(self, ticket_id): """Marks a ticket as solved.""" - self.engine.execute(self.tickets.update() - .where(self.tickets.c.ticket_id == ticket_id) - .values(solved=True)) + self.engine.execute( + self.tickets.update() + .where(self.tickets.c.ticket_id == ticket_id) + .values(solved=True) + ) def delete_ticket(self, ticket_id): """Deletes a ticket from the database.""" - self.engine.execute(self.occurrences.delete() - .where(self.occurrences.c.ticket_id == ticket_id)) - self.engine.execute(self.tickets.delete() - .where(self.tickets.c.ticket_id == ticket_id)) + self.engine.execute( + self.occurrences.delete().where(self.occurrences.c.ticket_id == ticket_id) + ) + self.engine.execute( + self.tickets.delete().where(self.tickets.c.ticket_id == ticket_id) + ) def get_ticket(self, ticket_id): """Return a single ticket with all occurrences.""" - row = 
self.engine.execute(self.tickets.select().where( - self.tickets.c.ticket_id == ticket_id)).fetchone() + row = self.engine.execute( + self.tickets.select().where(self.tickets.c.ticket_id == ticket_id) + ).fetchone() if row is not None: return Ticket(self, row) - def get_occurrences(self, ticket, order_by='-time', limit=50, offset=0): + def get_occurrences(self, ticket, order_by="-time", limit=50, offset=0): """Selects occurrences from the database for a ticket.""" - return [Occurrence(self, row) for row in - self.engine.execute(self._order( - self.occurrences.select() - .where(self.occurrences.c.ticket_id == ticket), - self.occurrences, order_by) - .limit(limit).offset(offset)).fetchall()] + return [ + Occurrence(self, row) + for row in self.engine.execute( + self._order( + self.occurrences.select().where( + self.occurrences.c.ticket_id == ticket + ), + self.occurrences, + order_by, + ) + .limit(limit) + .offset(offset) + ).fetchall() + ] class MongoDBBackend(BackendBase): @@ -292,11 +321,11 @@ def ticket_id(self): class _FixedOccurrenceClass(Occurrence): def __init__(self, db, row): - self.update_from_dict(json.loads(row['data'])) + self.update_from_dict(json.loads(row["data"])) self.db = db - self.time = row['time'] - self.ticket_id = row['ticket_id'] - self.occurrence_id = row['_id'] + self.time = row["time"] + self.ticket_id = row["ticket_id"] + self.occurrence_id = row["_id"] # TODO: Update connection setup once PYTHON-160 is solved. 
def setup_backend(self): @@ -304,24 +333,24 @@ def setup_backend(self): from pymongo.connection import Connection try: - from pymongo.uri_parser import parse_uri + from pymongo.uri_parser import parse_uri except ImportError: - from pymongo.connection import _parse_uri as parse_uri + from pymongo.connection import _parse_uri as parse_uri from pymongo.errors import AutoReconnect _connection = None - uri = self.options.pop('uri', u('')) + uri = self.options.pop("uri", u("")) _connection_attempts = 0 parsed_uri = parse_uri(uri, Connection.PORT) if type(parsed_uri) is tuple: - # pymongo < 2.0 - database = parsed_uri[1] + # pymongo < 2.0 + database = parsed_uri[1] else: - # pymongo >= 2.0 - database = parsed_uri['database'] + # pymongo >= 2.0 + database = parsed_uri["database"] # Handle auto reconnect signals properly while _connection_attempts < 5: @@ -337,94 +366,96 @@ def setup_backend(self): self.database = database # setup correct indexes - database.tickets.ensure_index([('record_hash', ASCENDING)], - unique=True) - database.tickets.ensure_index([('solved', ASCENDING), - ('level', ASCENDING)]) - database.occurrences.ensure_index([('time', DESCENDING)]) + database.tickets.ensure_index([("record_hash", ASCENDING)], unique=True) + database.tickets.ensure_index([("solved", ASCENDING), ("level", ASCENDING)]) + database.occurrences.ensure_index([("time", DESCENDING)]) def _order(self, q, order_by): from pymongo import ASCENDING, DESCENDING - col = '%s' % (order_by[0] == '-' and order_by[1:] or order_by) - if order_by[0] == '-': + + col = "%s" % (order_by[0] == "-" and order_by[1:] or order_by) + if order_by[0] == "-": return q.sort(col, DESCENDING) return q.sort(col, ASCENDING) def _oid(self, ticket_id): from pymongo.objectid import ObjectId + return ObjectId(ticket_id) def record_ticket(self, record, data, hash, app_id): """Records a log record as ticket.""" db = self.database - ticket = db.tickets.find_one({'record_hash': hash}) + ticket = 
db.tickets.find_one({"record_hash": hash}) if not ticket: doc = { - 'record_hash': hash, - 'level': record.level, - 'channel': record.channel or u(''), - 'location': u('%s:%d') % (record.filename, - record.lineno), - 'module': record.module or u(''), - 'occurrence_count': 0, - 'solved': False, - 'app_id': app_id, + "record_hash": hash, + "level": record.level, + "channel": record.channel or u(""), + "location": u("%s:%d") % (record.filename, record.lineno), + "module": record.module or u(""), + "occurrence_count": 0, + "solved": False, + "app_id": app_id, } ticket_id = db.tickets.insert(doc) else: - ticket_id = ticket['_id'] + ticket_id = ticket["_id"] - db.tickets.update({'_id': ticket_id}, { - '$inc': { - 'occurrence_count': 1 + db.tickets.update( + {"_id": ticket_id}, + { + "$inc": {"occurrence_count": 1}, + "$set": {"last_occurrence_time": record.time, "solved": False}, }, - '$set': { - 'last_occurrence_time': record.time, - 'solved': False - } - }) + ) # We store occurrences in a seperate collection so that # we can make it a capped collection optionally. 
- db.occurrences.insert({ - 'ticket_id': self._oid(ticket_id), - 'app_id': app_id, - 'time': record.time, - 'data': json.dumps(data), - }) + db.occurrences.insert( + { + "ticket_id": self._oid(ticket_id), + "app_id": app_id, + "time": record.time, + "data": json.dumps(data), + } + ) def count_tickets(self): """Returns the number of tickets.""" return self.database.tickets.count() - def get_tickets(self, order_by='-last_occurrence_time', limit=50, - offset=0): + def get_tickets(self, order_by="-last_occurrence_time", limit=50, offset=0): """Selects tickets from the database.""" - query = (self._order(self.database.tickets.find(), order_by) - .limit(limit).skip(offset)) + query = ( + self._order(self.database.tickets.find(), order_by) + .limit(limit) + .skip(offset) + ) return [self._FixedTicketClass(self, obj) for obj in query] def solve_ticket(self, ticket_id): """Marks a ticket as solved.""" - self.database.tickets.update({'_id': self._oid(ticket_id)}, - {'solved': True}) + self.database.tickets.update({"_id": self._oid(ticket_id)}, {"solved": True}) def delete_ticket(self, ticket_id): """Deletes a ticket from the database.""" - self.database.occurrences.remove({'ticket_id': self._oid(ticket_id)}) - self.database.tickets.remove({'_id': self._oid(ticket_id)}) + self.database.occurrences.remove({"ticket_id": self._oid(ticket_id)}) + self.database.tickets.remove({"_id": self._oid(ticket_id)}) def get_ticket(self, ticket_id): """Return a single ticket with all occurrences.""" - ticket = self.database.tickets.find_one({'_id': self._oid(ticket_id)}) + ticket = self.database.tickets.find_one({"_id": self._oid(ticket_id)}) if ticket: return Ticket(self, ticket) - def get_occurrences(self, ticket, order_by='-time', limit=50, offset=0): + def get_occurrences(self, ticket, order_by="-time", limit=50, offset=0): """Selects occurrences from the database for a ticket.""" collection = self.database.occurrences - occurrences = self._order(collection.find( - {'ticket_id': 
self._oid(ticket)} - ), order_by).limit(limit).skip(offset) + occurrences = ( + self._order(collection.find({"ticket_id": self._oid(ticket)}), order_by) + .limit(limit) + .skip(offset) + ) return [self._FixedOccurrenceClass(self, obj) for obj in occurrences] @@ -444,8 +475,8 @@ def hash_record_raw(self, record): if self.hash_salt is not None: hash_salt = self.hash_salt if not PY2 or isinstance(hash_salt, unicode): - hash_salt = hash_salt.encode('utf-8') - hash.update(b('\x00') + hash_salt) + hash_salt = hash_salt.encode("utf-8") + hash.update(b("\x00") + hash_salt) return hash @@ -473,15 +504,23 @@ class TicketingHandler(TicketingBaseHandler): #: :class:`SQLAlchemyBackend`. default_backend = SQLAlchemyBackend - def __init__(self, uri, app_id='generic', level=NOTSET, - filter=None, bubble=False, hash_salt=None, backend=None, - **db_options): + def __init__( + self, + uri, + app_id="generic", + level=NOTSET, + filter=None, + bubble=False, + hash_salt=None, + backend=None, + **db_options, + ): if hash_salt is None: - hash_salt = u('apphash-') + app_id + hash_salt = u("apphash-") + app_id TicketingBaseHandler.__init__(self, hash_salt, level, filter, bubble) if backend is None: backend = self.default_backend - db_options['uri'] = uri + db_options["uri"] = uri self.set_backend(backend, **db_options) self.app_id = app_id @@ -502,6 +541,6 @@ def record_ticket(self, record, data, hash): def emit(self, record): """Emits a single record and writes it to the database.""" - hash = self.hash_record(record).encode('utf-8') + hash = self.hash_record(record).encode("utf-8") data = self.process_record(record, hash) self.record_ticket(record, data, hash) diff --git a/logbook/utils.py b/logbook/utils.py index 2d485f5..7f676d0 100644 --- a/logbook/utils.py +++ b/logbook/utils.py @@ -8,7 +8,6 @@ class _SlowContextNotifier: - def __init__(self, threshold, func): self.timer = threading.Timer(threshold, func) @@ -20,7 +19,7 @@ def __exit__(self, *_): self.timer.cancel() -_slow_logger = 
Logger('Slow') +_slow_logger = Logger("Slow") def logged_if_slow(*args, **kwargs): @@ -37,16 +36,18 @@ def logged_if_slow(*args, **kwargs): The remaining parameters are passed to the :meth:`~logbook.base.LoggerMixin.log` method. """ - threshold = kwargs.pop('threshold', 1) - func = kwargs.pop('func', None) + threshold = kwargs.pop("threshold", 1) + func = kwargs.pop("func", None) if func is None: - logger = kwargs.pop('logger', _slow_logger) - level = kwargs.pop('level', DEBUG) + logger = kwargs.pop("logger", _slow_logger) + level = kwargs.pop("level", DEBUG) func = functools.partial(logger.log, level, *args, **kwargs) else: - if 'logger' in kwargs or 'level' in kwargs: - raise TypeError("If using deprecated func parameter, 'logger' and" - " 'level' arguments cannot be passed.") + if "logger" in kwargs or "level" in kwargs: + raise TypeError( + "If using deprecated func parameter, 'logger' and" + " 'level' arguments cannot be passed." + ) func = functools.partial(func, *args, **kwargs) return _SlowContextNotifier(threshold, func) @@ -55,6 +56,7 @@ def logged_if_slow(*args, **kwargs): class _Local(threading.local): enabled = True + _local = _Local() @@ -86,18 +88,19 @@ def forget_deprecation_locations(): def _write_deprecations_if_needed(message, frame_correction): if not _local.enabled: return - caller_location = _get_caller_location(frame_correction=frame_correction+1) + caller_location = _get_caller_location(frame_correction=frame_correction + 1) if caller_location not in _deprecation_locations: - _deprecation_logger.warning(message, frame_correction=frame_correction+1) + _deprecation_logger.warning(message, frame_correction=frame_correction + 1) _deprecation_locations.add(caller_location) def log_deprecation_message(message, frame_correction=0): - _write_deprecations_if_needed(f"Deprecation message: {message}", frame_correction=frame_correction+1) + _write_deprecations_if_needed( + f"Deprecation message: {message}", frame_correction=frame_correction + 1 + ) 
class _DeprecatedFunction: - def __init__(self, func, message, obj=None, objtype=None): super().__init__() self._func = func @@ -108,7 +111,7 @@ def __init__(self, func, message, obj=None, objtype=None): def _get_underlying_func(self): returned = self._func if isinstance(returned, classmethod): - if hasattr(returned, '__func__'): + if hasattr(returned, "__func__"): returned = returned.__func__ else: returned = returned.__get__(self._objtype).__func__ @@ -129,15 +132,14 @@ def __call__(self, *args, **kwargs): def _get_func_str(self): func = self._get_underlying_func() if self._objtype is not None: - return f'{self._objtype.__name__}.{func.__name__}' - return f'{func.__module__}.{func.__name__}' + return f"{self._objtype.__name__}.{func.__name__}" + return f"{func.__module__}.{func.__name__}" def __get__(self, obj, objtype): return self.bound_to(obj, objtype) def bound_to(self, obj, objtype): - return _DeprecatedFunction(self._func, self._message, obj=obj, - objtype=objtype) + return _DeprecatedFunction(self._func, self._message, obj=obj, objtype=objtype) @property def __name__(self): @@ -149,8 +151,7 @@ def __doc__(self): if returned: # pylint: disable=no-member returned += "\n.. 
deprecated\n" # pylint: disable=no-member if self._message: - returned += " {}".format( - self._message) # pylint: disable=no-member + returned += f" {self._message}" # pylint: disable=no-member return returned @__doc__.setter diff --git a/scripts/make-release.py b/scripts/make-release.py index d99c19e..ae1a42b 100644 --- a/scripts/make-release.py +++ b/scripts/make-release.py @@ -16,26 +16,28 @@ from datetime import date, datetime from subprocess import PIPE, Popen -_date_clean_re = re.compile(r'(\d+)(st|nd|rd|th)') +_date_clean_re = re.compile(r"(\d+)(st|nd|rd|th)") def parse_changelog(): - with open('CHANGES') as f: + with open("CHANGES") as f: lineiter = iter(f) for line in lineiter: - match = re.search(r'^Version\s+(.*)', line.strip()) + match = re.search(r"^Version\s+(.*)", line.strip()) if match is None: continue version = match.group(1).strip() - if lineiter.next().count('-') != len(match.group(0)): + if lineiter.next().count("-") != len(match.group(0)): continue while 1: change_info = lineiter.next().strip() if change_info: break - match = re.search(r'released on (\w+\s+\d+\w+\s+\d+)' - r'(?:, codename (.*))?(?i)', change_info) + match = re.search( + r"released on (\w+\s+\d+\w+\s+\d+)" r"(?:, codename (.*))?(?i)", + change_info, + ) if match is None: continue @@ -45,16 +47,16 @@ def parse_changelog(): def bump_version(version): try: - parts = map(int, version.split('.')) + parts = map(int, version.split(".")) except ValueError: - fail('Current version is not numeric') + fail("Current version is not numeric") parts[-1] += 1 - return '.'.join(map(str, parts)) + return ".".join(map(str, parts)) def parse_date(string): - string = _date_clean_re.sub(r'\1', string) - return datetime.strptime(string, '%B %d %Y') + string = _date_clean_re.sub(r"\1", string) + return datetime.strptime(string, "%B %d %Y") def set_filename_version(filename, version_number, pattern): @@ -64,25 +66,27 @@ def inject_version(match): before, old, after = match.groups() 
changed.append(True) return before + version_number + after + with open(filename) as f: - contents = re.sub(r"^(\s*%s\s*=\s*')(.+?)(')(?sm)" % pattern, - inject_version, f.read()) + contents = re.sub( + r"^(\s*%s\s*=\s*')(.+?)(')(?sm)" % pattern, inject_version, f.read() + ) if not changed: - fail('Could not find %s in %s', pattern, filename) + fail("Could not find %s in %s", pattern, filename) - with open(filename, 'w') as f: + with open(filename, "w") as f: f.write(contents) def set_version(version): - info('Setting version to %s', version) - with open('logbook/__version__.py', 'w') as f: - f.write(f'__version__ = {version!r}') + info("Setting version to %s", version) + with open("logbook/__version__.py", "w") as f: + f.write(f"__version__ = {version!r}") def fail(message, *args): - print >> sys.stderr, 'Error:', message % args + print >> sys.stderr, "Error:", message % args sys.exit(1) @@ -91,60 +95,65 @@ def info(message, *args): def get_git_tags(): - return set(Popen(['git', 'tag'], - stdout=PIPE).communicate()[0].splitlines()) + return set(Popen(["git", "tag"], stdout=PIPE).communicate()[0].splitlines()) def git_is_clean(): - return Popen(['git', 'diff', '--quiet']).wait() == 0 + return Popen(["git", "diff", "--quiet"]).wait() == 0 def make_git_commit(message, *args): message = message % args - Popen(['git', 'commit', '-am', message]).wait() + Popen(["git", "commit", "-am", message]).wait() def make_git_tag(tag): info('Tagging "%s"', tag) - Popen(['git', 'tag', tag]).wait() + Popen(["git", "tag", tag]).wait() parser = argparse.ArgumentParser("%prog [options]") -parser.add_argument("--no-upload", dest="upload", - action="store_false", default=True) +parser.add_argument("--no-upload", dest="upload", action="store_false", default=True) def main(): args = parser.parse_args() - os.chdir(os.path.join(os.path.dirname(__file__), '..')) + os.chdir(os.path.join(os.path.dirname(__file__), "..")) rv = parse_changelog() if rv is None: - fail('Could not parse changelog') + 
fail("Could not parse changelog") version, release_date, codename = rv - dev_version = bump_version(version) + '-dev' - - info('Releasing %s (codename %s, release date %s)', - version, codename, release_date.strftime('%d/%m/%Y')) + dev_version = bump_version(version) + "-dev" + + info( + "Releasing %s (codename %s, release date %s)", + version, + codename, + release_date.strftime("%d/%m/%Y"), + ) tags = get_git_tags() if version in tags: fail('Version "%s" is already tagged', version) if release_date.date() != date.today(): - fail('Release date is not today (%s != %s)' % - (release_date.date(), date.today())) + fail( + "Release date is not today ({} != {})".format( + release_date.date(), date.today() + ) + ) if not git_is_clean(): - fail('You have uncommitted changes in git') + fail("You have uncommitted changes in git") set_version(version) - make_git_commit('Bump version number to %s', version) + make_git_commit("Bump version number to %s", version) make_git_tag(version) set_version(dev_version) - make_git_commit('Bump version number to %s', dev_version) + make_git_commit("Bump version number to %s", dev_version) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/setup.py b/setup.py index 9feec42..cf06c4d 100644 --- a/setup.py +++ b/setup.py @@ -27,6 +27,7 @@ def status_msgs(*msgs): ext_modules = [] else: from Cython.Build import cythonize + ext_modules = cythonize( [Extension("logbook._speedups", sources=["logbook/_speedups.pyx"])], language_level=3, diff --git a/tests/conftest.py b/tests/conftest.py index 0ddd5b9..3bbeedc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,12 +9,11 @@ @pytest.fixture def logger(): - return logbook.Logger('testlogger') + return logbook.Logger("testlogger") @pytest.fixture def active_handler(request, test_handler, activation_strategy): - s = activation_strategy(test_handler) s.activate() @@ -31,7 +30,6 @@ def test_handler(): class ActivationStrategy: - def __init__(self, handler): 
super().__init__() self.handler = handler @@ -51,7 +49,6 @@ def __exit__(self, *_): class ContextEnteringStrategy(ActivationStrategy): - def activate(self): self.handler.__enter__() @@ -60,9 +57,9 @@ def deactivate(self): class PushingStrategy(ActivationStrategy): - def activate(self): from logbook.concurrency import is_gevent_enabled + if is_gevent_enabled(): self.handler.push_greenlet() else: @@ -70,6 +67,7 @@ def activate(self): def deactivate(self): from logbook.concurrency import is_gevent_enabled + if is_gevent_enabled(): self.handler.pop_greenlet() else: @@ -83,7 +81,7 @@ def activation_strategy(request): @pytest.fixture def logfile(tmpdir): - return str(tmpdir.join('logfile.log')) + return str(tmpdir.join("logfile.log")) @pytest.fixture @@ -93,17 +91,22 @@ def default_handler(request): request.addfinalizer(returned.pop_application) return returned + try: import gevent except ImportError: pass else: + @pytest.fixture(scope="module", autouse=True, params=[False, True]) def gevent(request): - module_name = getattr(request.module, '__name__', '') - if (not any(s in module_name for s in ('queues', 'processors')) - and request.param): + module_name = getattr(request.module, "__name__", "") + if ( + not any(s in module_name for s in ("queues", "processors")) + and request.param + ): from logbook.concurrency import _disable_gevent, enable_gevent + enable_gevent() @request.addfinalizer @@ -112,7 +115,9 @@ def fin(): def pytest_ignore_collect(path, config): - if 'test_asyncio.py' in path.basename and (sys.version_info.major < 3 or sys.version_info.minor < 5): + if "test_asyncio.py" in path.basename and ( + sys.version_info.major < 3 or sys.version_info.minor < 5 + ): return True return False diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 1297b63..50544b5 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -20,10 +20,12 @@ async def task(handler, msg): await asyncio.sleep(0) # allow for context switch - 
asyncio.get_event_loop().run_until_complete(asyncio.gather(task(h1, 'task1'), task(h2, 'task2'))) + asyncio.get_event_loop().run_until_complete( + asyncio.gather(task(h1, "task1"), task(h2, "task2")) + ) assert len(h1.records) == ITERATIONS - assert all(['task1' == r.msg for r in h1.records]) + assert all(["task1" == r.msg for r in h1.records]) assert len(h2.records) == ITERATIONS - assert all(['task2' == r.msg for r in h2.records]) + assert all(["task2" == r.msg for r in h2.records]) diff --git a/tests/test_ci.py b/tests/test_ci.py index 3fa34e6..087685d 100644 --- a/tests/test_ci.py +++ b/tests/test_ci.py @@ -7,15 +7,16 @@ @appveyor def test_appveyor_speedups(): - if os.environ.get('CYBUILD'): + if os.environ.get("CYBUILD"): import logbook._speedups else: with pytest.raises(ImportError): import logbook._speedups + @travis def test_travis_speedups(): - if os.environ.get('CYBUILD'): + if os.environ.get("CYBUILD"): import logbook._speedups else: with pytest.raises(ImportError): diff --git a/tests/test_deadlock.py b/tests/test_deadlock.py index f0cf3c7..037b349 100644 --- a/tests/test_deadlock.py +++ b/tests/test_deadlock.py @@ -29,8 +29,7 @@ def release(self): def test_deadlock_in_emit(): logbook_logger = logbook.Logger("logbook") obj = MyObject(logbook_logger.info) - stream_handler = logbook.StreamHandler(stream=sys.stderr, - level=logbook.DEBUG) + stream_handler = logbook.StreamHandler(stream=sys.stderr, level=logbook.DEBUG) stream_handler.lock = FakeLock() with stream_handler.applicationbound(): logbook_logger.info("format this: {}", obj) diff --git a/tests/test_file_handler.py b/tests/test_file_handler.py index 092a979..5524765 100644 --- a/tests/test_file_handler.py +++ b/tests/test_file_handler.py @@ -15,230 +15,233 @@ def test_file_handler(logfile, activation_strategy, logger): handler = logbook.FileHandler( logfile, - format_string='{record.level_name}:{record.channel}:{record.message}',) + 
format_string="{record.level_name}:{record.channel}:{record.message}", + ) with activation_strategy(handler): - logger.warn('warning message') + logger.warn("warning message") handler.close() with open(logfile) as f: - assert f.readline() == 'WARNING:testlogger:warning message\n' + assert f.readline() == "WARNING:testlogger:warning message\n" def test_file_handler_unicode(logfile, activation_strategy, logger): with capturing_stderr_context() as captured: with activation_strategy(logbook.FileHandler(logfile)): - logger.info(u('\u0431')) - assert (not captured.getvalue()) + logger.info(u("\u0431")) + assert not captured.getvalue() def test_file_handler_delay(logfile, activation_strategy, logger): handler = logbook.FileHandler( logfile, - format_string='{record.level_name}:{record.channel}:{record.message}', - delay=True) - assert (not os.path.isfile(logfile)) + format_string="{record.level_name}:{record.channel}:{record.message}", + delay=True, + ) + assert not os.path.isfile(logfile) with activation_strategy(handler): - logger.warn('warning message') + logger.warn("warning message") handler.close() with open(logfile) as f: - assert f.readline() == 'WARNING:testlogger:warning message\n' + assert f.readline() == "WARNING:testlogger:warning message\n" def test_monitoring_file_handler(logfile, activation_strategy, logger): - if os.name == 'nt': - pytest.skip( - 'unsupported on windows due to different IO (also unneeded)') + if os.name == "nt": + pytest.skip("unsupported on windows due to different IO (also unneeded)") handler = logbook.MonitoringFileHandler( logfile, - format_string='{record.level_name}:{record.channel}:{record.message}', - delay=True) + format_string="{record.level_name}:{record.channel}:{record.message}", + delay=True, + ) with activation_strategy(handler): - logger.warn('warning message') - os.rename(logfile, logfile + '.old') - logger.warn('another warning message') + logger.warn("warning message") + os.rename(logfile, logfile + ".old") + 
logger.warn("another warning message") handler.close() with open(logfile) as f: - assert f.read().strip() == 'WARNING:testlogger:another warning message' + assert f.read().strip() == "WARNING:testlogger:another warning message" def test_custom_formatter(activation_strategy, logfile, logger): def custom_format(record, handler): - return record.level_name + ':' + record.message + return record.level_name + ":" + record.message handler = logbook.FileHandler(logfile) with activation_strategy(handler): handler.formatter = custom_format - logger.warn('Custom formatters are awesome') + logger.warn("Custom formatters are awesome") with open(logfile) as f: - assert f.readline() == 'WARNING:Custom formatters are awesome\n' + assert f.readline() == "WARNING:Custom formatters are awesome\n" def test_rotating_file_handler(logfile, activation_strategy, logger): basename = os.path.basename(logfile) - handler = logbook.RotatingFileHandler(logfile, max_size=2048, - backup_count=3, - ) - handler.format_string = '{record.message}' + handler = logbook.RotatingFileHandler( + logfile, + max_size=2048, + backup_count=3, + ) + handler.format_string = "{record.message}" with activation_strategy(handler): for c, x in zip(LETTERS, xrange(32)): logger.warn(c * 256) - files = [x for x in os.listdir(os.path.dirname(logfile)) - if x.startswith(basename)] + files = [x for x in os.listdir(os.path.dirname(logfile)) if x.startswith(basename)] files.sort() - assert files == [basename, basename + - '.1', basename + '.2', basename + '.3'] + assert files == [basename, basename + ".1", basename + ".2", basename + ".3"] with open(logfile) as f: - assert f.readline().rstrip() == ('C' * 256) - assert f.readline().rstrip() == ('D' * 256) - assert f.readline().rstrip() == ('E' * 256) - assert f.readline().rstrip() == ('F' * 256) + assert f.readline().rstrip() == ("C" * 256) + assert f.readline().rstrip() == ("D" * 256) + assert f.readline().rstrip() == ("E" * 256) + assert f.readline().rstrip() == ("F" * 256) 
@pytest.mark.parametrize("backup_count", [1, 3]) def test_timed_rotating_file_handler(tmpdir, activation_strategy, backup_count): - basename = str(tmpdir.join('trot.log')) - handler = logbook.TimedRotatingFileHandler( - basename, backup_count=backup_count) - handler.format_string = '[{record.time:%H:%M}] {record.message}' + basename = str(tmpdir.join("trot.log")) + handler = logbook.TimedRotatingFileHandler(basename, backup_count=backup_count) + handler.format_string = "[{record.time:%H:%M}] {record.message}" - def fake_record(message, year, month, day, hour=0, - minute=0, second=0): - lr = logbook.LogRecord('Test Logger', logbook.WARNING, - message) + def fake_record(message, year, month, day, hour=0, minute=0, second=0): + lr = logbook.LogRecord("Test Logger", logbook.WARNING, message) lr.time = datetime(year, month, day, hour, minute, second) return lr with activation_strategy(handler): for x in xrange(10): - handler.handle(fake_record('First One', 2010, 1, 5, x + 1)) + handler.handle(fake_record("First One", 2010, 1, 5, x + 1)) for x in xrange(20): - handler.handle(fake_record('Second One', 2010, 1, 6, x + 1)) + handler.handle(fake_record("Second One", 2010, 1, 6, x + 1)) for x in xrange(10): - handler.handle(fake_record('Third One', 2010, 1, 7, x + 1)) + handler.handle(fake_record("Third One", 2010, 1, 7, x + 1)) for x in xrange(20): - handler.handle(fake_record('Last One', 2010, 1, 8, x + 1)) + handler.handle(fake_record("Last One", 2010, 1, 8, x + 1)) - files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith('trot')) + files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith("trot")) - assert files == [f'trot-2010-01-0{i}.log' - for i in xrange(5, 9)][-backup_count:] - with open(str(tmpdir.join('trot-2010-01-08.log'))) as f: - assert f.readline().rstrip() == '[01:00] Last One' - assert f.readline().rstrip() == '[02:00] Last One' + assert files == [f"trot-2010-01-0{i}.log" for i in xrange(5, 9)][-backup_count:] + with 
open(str(tmpdir.join("trot-2010-01-08.log"))) as f: + assert f.readline().rstrip() == "[01:00] Last One" + assert f.readline().rstrip() == "[02:00] Last One" if backup_count > 1: - with open(str(tmpdir.join('trot-2010-01-07.log'))) as f: - assert f.readline().rstrip() == '[01:00] Third One' - assert f.readline().rstrip() == '[02:00] Third One' + with open(str(tmpdir.join("trot-2010-01-07.log"))) as f: + assert f.readline().rstrip() == "[01:00] Third One" + assert f.readline().rstrip() == "[02:00] Third One" + @pytest.mark.parametrize("backup_count", [1, 3]) -def test_timed_rotating_file_handler__rollover_format(tmpdir, activation_strategy, backup_count): - basename = str(tmpdir.join('trot.log')) +def test_timed_rotating_file_handler__rollover_format( + tmpdir, activation_strategy, backup_count +): + basename = str(tmpdir.join("trot.log")) handler = logbook.TimedRotatingFileHandler( - basename, backup_count=backup_count, - rollover_format='{basename}{ext}.{timestamp}', + basename, + backup_count=backup_count, + rollover_format="{basename}{ext}.{timestamp}", ) - handler.format_string = '[{record.time:%H:%M}] {record.message}' + handler.format_string = "[{record.time:%H:%M}] {record.message}" - def fake_record(message, year, month, day, hour=0, - minute=0, second=0): - lr = logbook.LogRecord('Test Logger', logbook.WARNING, - message) + def fake_record(message, year, month, day, hour=0, minute=0, second=0): + lr = logbook.LogRecord("Test Logger", logbook.WARNING, message) lr.time = datetime(year, month, day, hour, minute, second) return lr with activation_strategy(handler): for x in xrange(10): - handler.handle(fake_record('First One', 2010, 1, 5, x + 1)) + handler.handle(fake_record("First One", 2010, 1, 5, x + 1)) for x in xrange(20): - handler.handle(fake_record('Second One', 2010, 1, 6, x + 1)) + handler.handle(fake_record("Second One", 2010, 1, 6, x + 1)) for x in xrange(10): - handler.handle(fake_record('Third One', 2010, 1, 7, x + 1)) + 
handler.handle(fake_record("Third One", 2010, 1, 7, x + 1)) for x in xrange(20): - handler.handle(fake_record('Last One', 2010, 1, 8, x + 1)) + handler.handle(fake_record("Last One", 2010, 1, 8, x + 1)) - files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith('trot')) + files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith("trot")) - assert files == [f'trot.log.2010-01-0{i}' - for i in xrange(5, 9)][-backup_count:] - with open(str(tmpdir.join('trot.log.2010-01-08'))) as f: - assert f.readline().rstrip() == '[01:00] Last One' - assert f.readline().rstrip() == '[02:00] Last One' + assert files == [f"trot.log.2010-01-0{i}" for i in xrange(5, 9)][-backup_count:] + with open(str(tmpdir.join("trot.log.2010-01-08"))) as f: + assert f.readline().rstrip() == "[01:00] Last One" + assert f.readline().rstrip() == "[02:00] Last One" if backup_count > 1: - with open(str(tmpdir.join('trot.log.2010-01-07'))) as f: - assert f.readline().rstrip() == '[01:00] Third One' - assert f.readline().rstrip() == '[02:00] Third One' + with open(str(tmpdir.join("trot.log.2010-01-07"))) as f: + assert f.readline().rstrip() == "[01:00] Third One" + assert f.readline().rstrip() == "[02:00] Third One" @pytest.mark.parametrize("backup_count", [1, 3]) @pytest.mark.parametrize("preexisting_file", [True, False]) def test_timed_rotating_file_handler__not_timed_filename_for_current( - tmpdir, activation_strategy, backup_count, preexisting_file + tmpdir, activation_strategy, backup_count, preexisting_file ): - basename = str(tmpdir.join('trot.log')) + basename = str(tmpdir.join("trot.log")) if preexisting_file: - with open(basename, 'w') as file: - file.write('contents') + with open(basename, "w") as file: + file.write("contents") jan_first = time.mktime(datetime(2010, 1, 1).timetuple()) os.utime(basename, (jan_first, jan_first)) handler = logbook.TimedRotatingFileHandler( basename, - format_string='[{record.time:%H:%M}] {record.message}', + format_string="[{record.time:%H:%M}] 
{record.message}", backup_count=backup_count, - rollover_format='{basename}{ext}.{timestamp}', + rollover_format="{basename}{ext}.{timestamp}", timed_filename_for_current=False, ) - def fake_record(message, year, month, day, hour=0, - minute=0, second=0): - lr = logbook.LogRecord('Test Logger', logbook.WARNING, - message) + def fake_record(message, year, month, day, hour=0, minute=0, second=0): + lr = logbook.LogRecord("Test Logger", logbook.WARNING, message) lr.time = datetime(year, month, day, hour, minute, second) return lr with activation_strategy(handler): for x in xrange(10): - handler.handle(fake_record('First One', 2010, 1, 5, x + 1)) + handler.handle(fake_record("First One", 2010, 1, 5, x + 1)) for x in xrange(20): - handler.handle(fake_record('Second One', 2010, 1, 6, x + 1)) + handler.handle(fake_record("Second One", 2010, 1, 6, x + 1)) for x in xrange(10): - handler.handle(fake_record('Third One', 2010, 1, 7, x + 1)) + handler.handle(fake_record("Third One", 2010, 1, 7, x + 1)) for x in xrange(20): - handler.handle(fake_record('Last One', 2010, 1, 8, x + 1)) + handler.handle(fake_record("Last One", 2010, 1, 8, x + 1)) - computed_files = [x for x in os.listdir(str(tmpdir)) if x.startswith('trot')] + computed_files = [x for x in os.listdir(str(tmpdir)) if x.startswith("trot")] - expected_files = ['trot.log.2010-01-01'] if preexisting_file else [] - expected_files += [f'trot.log.2010-01-0{i}' for i in xrange(5, 8)] - expected_files += ['trot.log'] + expected_files = ["trot.log.2010-01-01"] if preexisting_file else [] + expected_files += [f"trot.log.2010-01-0{i}" for i in xrange(5, 8)] + expected_files += ["trot.log"] expected_files = expected_files[-backup_count:] assert sorted(computed_files) == sorted(expected_files) - with open(str(tmpdir.join('trot.log'))) as f: - assert f.readline().rstrip() == '[01:00] Last One' - assert f.readline().rstrip() == '[02:00] Last One' + with open(str(tmpdir.join("trot.log"))) as f: + assert f.readline().rstrip() == 
"[01:00] Last One" + assert f.readline().rstrip() == "[02:00] Last One" if backup_count > 1: - with open(str(tmpdir.join('trot.log.2010-01-07'))) as f: - assert f.readline().rstrip() == '[01:00] Third One' - assert f.readline().rstrip() == '[02:00] Third One' + with open(str(tmpdir.join("trot.log.2010-01-07"))) as f: + assert f.readline().rstrip() == "[01:00] Third One" + assert f.readline().rstrip() == "[02:00] Third One" + def _decompress(input_file_name, use_gzip=True): if use_gzip: - with gzip.open(input_file_name, 'rb') as in_f: + with gzip.open(input_file_name, "rb") as in_f: return in_f.read().decode() else: - with open(input_file_name, 'rb') as in_f: + with open(input_file_name, "rb") as in_f: return brotli.decompress(in_f.read()).decode() + @pytest.mark.parametrize("use_gzip", [True, False]) def test_compression_file_handler(logfile, activation_strategy, logger, use_gzip): - handler = logbook.GZIPCompressionHandler(logfile) if use_gzip else logbook.BrotliCompressionHandler(logfile) - handler.format_string = '{record.level_name}:{record.channel}:{record.message}' + handler = ( + logbook.GZIPCompressionHandler(logfile) + if use_gzip + else logbook.BrotliCompressionHandler(logfile) + ) + handler.format_string = "{record.level_name}:{record.channel}:{record.message}" with activation_strategy(handler): - logger.warn('warning message') + logger.warn("warning message") handler.close() - assert _decompress(logfile, use_gzip) == 'WARNING:testlogger:warning message\n' + assert _decompress(logfile, use_gzip) == "WARNING:testlogger:warning message\n" diff --git a/tests/test_fingers_crossed_handler.py b/tests/test_fingers_crossed_handler.py index 49f4882..977e324 100644 --- a/tests/test_fingers_crossed_handler.py +++ b/tests/test_fingers_crossed_handler.py @@ -4,26 +4,25 @@ def test_fingerscrossed(activation_strategy, logger, default_handler): - handler = logbook.FingersCrossedHandler(default_handler, - logbook.WARNING) + handler = 
logbook.FingersCrossedHandler(default_handler, logbook.WARNING) # if no warning occurs, the infos are not logged with activation_strategy(handler): with capturing_stderr_context() as captured: - logger.info('some info') - assert captured.getvalue() == '' - assert (not handler.triggered) + logger.info("some info") + assert captured.getvalue() == "" + assert not handler.triggered # but if it does, all log messages are output with activation_strategy(handler): with capturing_stderr_context() as captured: - logger.info('some info') - logger.warning('something happened') - logger.info('something else happened') + logger.info("some info") + logger.warning("something happened") + logger.info("something else happened") logs = captured.getvalue() - assert 'some info' in logs - assert 'something happened' in logs - assert 'something else happened' in logs + assert "some info" in logs + assert "something happened" in logs + assert "something else happened" in logs assert handler.triggered @@ -36,42 +35,43 @@ def handler_factory(record, fch): return handler def make_fch(): - return logbook.FingersCrossedHandler(handler_factory, - logbook.WARNING) + return logbook.FingersCrossedHandler(handler_factory, logbook.WARNING) fch = make_fch() with activation_strategy(fch): - logger.info('some info') + logger.info("some info") assert len(handlers) == 0 - logger.warning('a warning') + logger.warning("a warning") assert len(handlers) == 1 - logger.error('an error') + logger.error("an error") assert len(handlers) == 1 assert handlers[0].has_infos assert handlers[0].has_warnings assert handlers[0].has_errors - assert (not handlers[0].has_notices) - assert (not handlers[0].has_criticals) - assert (not handlers[0].has_debugs) + assert not handlers[0].has_notices + assert not handlers[0].has_criticals + assert not handlers[0].has_debugs fch = make_fch() with activation_strategy(fch): - logger.info('some info') - logger.warning('a warning') + logger.info("some info") + logger.warning("a 
warning") assert len(handlers) == 2 def test_fingerscrossed_buffer_size(activation_strategy): - logger = logbook.Logger('Test') + logger = logbook.Logger("Test") test_handler = logbook.TestHandler() handler = logbook.FingersCrossedHandler(test_handler, buffer_size=3) with activation_strategy(handler): - logger.info('Never gonna give you up') - logger.warn('Aha!') - logger.warn('Moar!') - logger.error('Pure hate!') + logger.info("Never gonna give you up") + logger.warn("Aha!") + logger.warn("Moar!") + logger.error("Pure hate!") - assert test_handler.formatted_records == ['[WARNING] Test: Aha!', - '[WARNING] Test: Moar!', - '[ERROR] Test: Pure hate!'] + assert test_handler.formatted_records == [ + "[WARNING] Test: Aha!", + "[WARNING] Test: Moar!", + "[ERROR] Test: Pure hate!", + ] diff --git a/tests/test_flags.py b/tests/test_flags.py index 74a3215..f7c5fe0 100644 --- a/tests/test_flags.py +++ b/tests/test_flags.py @@ -7,27 +7,26 @@ def test_error_flag(logger): with capturing_stderr_context() as captured: - with logbook.Flags(errors='print'): - with logbook.Flags(errors='silent'): - logger.warn('Foo {42}', 'aha') - assert captured.getvalue() == '' + with logbook.Flags(errors="print"): + with logbook.Flags(errors="silent"): + logger.warn("Foo {42}", "aha") + assert captured.getvalue() == "" - with logbook.Flags(errors='silent'): - with logbook.Flags(errors='print'): - logger.warn('Foo {42}', 'aha') - assert captured.getvalue() != '' + with logbook.Flags(errors="silent"): + with logbook.Flags(errors="print"): + logger.warn("Foo {42}", "aha") + assert captured.getvalue() != "" with pytest.raises(Exception) as caught: - with logbook.Flags(errors='raise'): - logger.warn('Foo {42}', 'aha') - assert 'Could not format message with provided arguments' in str( - caught.value) + with logbook.Flags(errors="raise"): + logger.warn("Foo {42}", "aha") + assert "Could not format message with provided arguments" in str(caught.value) def test_disable_introspection(logger): with 
logbook.Flags(introspection=False): with logbook.TestHandler() as h: - logger.warn('Testing') + logger.warn("Testing") assert h.records[0].frame is None assert h.records[0].calling_frame is None assert h.records[0].module is None diff --git a/tests/test_groups.py b/tests/test_groups.py index c10960d..6f01e46 100644 --- a/tests/test_groups.py +++ b/tests/test_groups.py @@ -3,22 +3,23 @@ def test_groups(logger): def inject_extra(record): - record.extra['foo'] = 'bar' + record.extra["foo"] = "bar" + group = logbook.LoggerGroup(processor=inject_extra) group.level = logbook.ERROR group.add_logger(logger) with logbook.TestHandler() as handler: - logger.warn('A warning') - logger.error('An error') - assert (not handler.has_warning('A warning')) - assert handler.has_error('An error') - assert handler.records[0].extra['foo'] == 'bar' + logger.warn("A warning") + logger.error("An error") + assert not handler.has_warning("A warning") + assert handler.has_error("An error") + assert handler.records[0].extra["foo"] == "bar" def test_group_disabled(): group = logbook.LoggerGroup() - logger1 = logbook.Logger('testlogger1') - logger2 = logbook.Logger('testlogger2') + logger1 = logbook.Logger("testlogger1") + logger2 = logbook.Logger("testlogger2") group.add_logger(logger1) group.add_logger(logger2) @@ -28,8 +29,8 @@ def test_group_disabled(): group.disable() with logbook.TestHandler() as handler: - logger1.warn('Warning 1') - logger2.warn('Warning 2') + logger1.warn("Warning 1") + logger2.warn("Warning 2") assert not handler.has_warnings @@ -38,11 +39,11 @@ def test_group_disabled(): group.enable() with logbook.TestHandler() as handler: - logger1.warn('Warning 1') - logger2.warn('Warning 2') + logger1.warn("Warning 1") + logger2.warn("Warning 2") - assert handler.has_warning('Warning 1') - assert handler.has_warning('Warning 2') + assert handler.has_warning("Warning 1") + assert handler.has_warning("Warning 2") # Test group disabled, but logger explicitly enabled @@ -51,41 +52,41 
@@ def test_group_disabled(): logger1.enable() with logbook.TestHandler() as handler: - logger1.warn('Warning 1') - logger2.warn('Warning 2') + logger1.warn("Warning 1") + logger2.warn("Warning 2") - assert handler.has_warning('Warning 1') - assert not handler.has_warning('Warning 2') + assert handler.has_warning("Warning 1") + assert not handler.has_warning("Warning 2") # Logger 1 will be enabled by using force=True group.disable(force=True) with logbook.TestHandler() as handler: - logger1.warn('Warning 1') - logger2.warn('Warning 2') + logger1.warn("Warning 1") + logger2.warn("Warning 2") - assert not handler.has_warning('Warning 1') - assert not handler.has_warning('Warning 2') + assert not handler.has_warning("Warning 1") + assert not handler.has_warning("Warning 2") # Enabling without force means logger 1 will still be disabled. group.enable() with logbook.TestHandler() as handler: - logger1.warn('Warning 1') - logger2.warn('Warning 2') + logger1.warn("Warning 1") + logger2.warn("Warning 2") - assert not handler.has_warning('Warning 1') - assert handler.has_warning('Warning 2') + assert not handler.has_warning("Warning 1") + assert handler.has_warning("Warning 2") # Force logger 1 enabled. 
group.enable(force=True) with logbook.TestHandler() as handler: - logger1.warn('Warning 1') - logger2.warn('Warning 2') + logger1.warn("Warning 1") + logger2.warn("Warning 2") - assert handler.has_warning('Warning 1') - assert handler.has_warning('Warning 2') + assert handler.has_warning("Warning 1") + assert handler.has_warning("Warning 2") diff --git a/tests/test_handler_errors.py b/tests/test_handler_errors.py index 6548e4b..a2ba099 100644 --- a/tests/test_handler_errors.py +++ b/tests/test_handler_errors.py @@ -8,41 +8,48 @@ from .utils import capturing_stderr_context __file_without_pyc__ = __file__ -if __file_without_pyc__.endswith('.pyc'): +if __file_without_pyc__.endswith(".pyc"): __file_without_pyc__ = __file_without_pyc__[:-1] def test_handler_exception(activation_strategy, logger): class ErroringHandler(logbook.TestHandler): - def emit(self, record): - raise RuntimeError('something bad happened') + raise RuntimeError("something bad happened") with capturing_stderr_context() as stderr: with activation_strategy(ErroringHandler()): - logger.warn('I warn you.') - assert 'something bad happened' in stderr.getvalue() - assert 'I warn you' not in stderr.getvalue() + logger.warn("I warn you.") + assert "something bad happened" in stderr.getvalue() + assert "I warn you" not in stderr.getvalue() def test_formatting_exception(): def make_record(): - return logbook.LogRecord('Test Logger', logbook.WARNING, - 'Hello {foo:invalid}', - kwargs={'foo': 42}, - frame=sys._getframe()) + return logbook.LogRecord( + "Test Logger", + logbook.WARNING, + "Hello {foo:invalid}", + kwargs={"foo": 42}, + frame=sys._getframe(), + ) + record = make_record() with pytest.raises(TypeError) as caught: record.message errormsg = str(caught.value) assert re.search( - 'Could not format message with provided arguments: Invalid ' - '(?:format specifier)|(?:conversion specification)|(?:format spec)', - errormsg, re.M | re.S) + "Could not format message with provided arguments: Invalid " + 
"(?:format specifier)|(?:conversion specification)|(?:format spec)", + errormsg, + re.M | re.S, + ) assert "msg='Hello {foo:invalid}'" in errormsg - assert 'args=()' in errormsg + assert "args=()" in errormsg assert "kwargs={'foo': 42}" in errormsg assert re.search( - r'Happened in file .*%s, line \d+' % re.escape(__file_without_pyc__), - errormsg, re.M | re.S) + r"Happened in file .*%s, line \d+" % re.escape(__file_without_pyc__), + errormsg, + re.M | re.S, + ) diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 4de4c61..04502dc 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -4,32 +4,34 @@ def test_custom_logger(activation_strategy, logger): - client_ip = '127.0.0.1' + client_ip = "127.0.0.1" class CustomLogger(logbook.Logger): - def process_record(self, record): - record.extra['ip'] = client_ip + record.extra["ip"] = client_ip - custom_log = CustomLogger('awesome logger') - fmt = ('[{record.level_name}] {record.channel}: ' - '{record.message} [{record.extra[ip]}]') + custom_log = CustomLogger("awesome logger") + fmt = ( + "[{record.level_name}] {record.channel}: " + "{record.message} [{record.extra[ip]}]" + ) handler = logbook.TestHandler(format_string=fmt) assert handler.format_string == fmt with activation_strategy(handler): - custom_log.warn('Too many sounds') + custom_log.warn("Too many sounds") logger.warn('"Music" playing') assert handler.formatted_records == [ - '[WARNING] awesome logger: Too many sounds [127.0.0.1]', - '[WARNING] testlogger: "Music" playing []'] + "[WARNING] awesome logger: Too many sounds [127.0.0.1]", + '[WARNING] testlogger: "Music" playing []', + ] def test_custom_handling(activation_strategy, logger): class MyTestHandler(logbook.TestHandler): def handle(self, record): - if record.extra.get('flag') != 'testing': + if record.extra.get("flag") != "testing": return False return logbook.TestHandler.handle(self, record) @@ -39,99 +41,98 @@ def handle(self, record): class MyLogger(logbook.Logger): def 
process_record(self, record): logbook.Logger.process_record(self, record) - record.extra['flag'] = 'testing' + record.extra["flag"] = "testing" log = MyLogger() handler = MyTestHandler() with capturing_stderr_context() as captured: with activation_strategy(handler): - log.warn('From my logger') - logger.warn('From another logger') - assert handler.has_warning('From my logger') - assert 'From another logger' in captured.getvalue() + log.warn("From my logger") + logger.warn("From another logger") + assert handler.has_warning("From my logger") + assert "From another logger" in captured.getvalue() def test_nested_setups(activation_strategy): with capturing_stderr_context() as captured: - logger = logbook.Logger('App') - test_handler = logbook.TestHandler(level='WARNING') + logger = logbook.Logger("App") + test_handler = logbook.TestHandler(level="WARNING") mail_handler = make_fake_mail_handler(bubble=True) - handlers = logbook.NestedSetup([ - logbook.NullHandler(), - test_handler, - mail_handler - ]) + handlers = logbook.NestedSetup( + [logbook.NullHandler(), test_handler, mail_handler] + ) with activation_strategy(handlers): - logger.warn('This is a warning') - logger.error('This is also a mail') + logger.warn("This is a warning") + logger.error("This is also a mail") try: 1 / 0 except Exception: logger.exception() - logger.warn('And here we go straight back to stderr') + logger.warn("And here we go straight back to stderr") - assert test_handler.has_warning('This is a warning') - assert test_handler.has_error('This is also a mail') + assert test_handler.has_warning("This is a warning") + assert test_handler.has_error("This is also a mail") assert len(mail_handler.mails) == 2 - assert 'This is also a mail' in mail_handler.mails[0][2] - assert '1 / 0' in mail_handler.mails[1][2] - assert 'And here we go straight back to stderr' in captured.getvalue() + assert "This is also a mail" in mail_handler.mails[0][2] + assert "1 / 0" in mail_handler.mails[1][2] + assert "And 
here we go straight back to stderr" in captured.getvalue() with activation_strategy(handlers): - logger.warn('threadbound warning') + logger.warn("threadbound warning") handlers.push_application() try: - logger.warn('applicationbound warning') + logger.warn("applicationbound warning") finally: handlers.pop_application() def test_filtering(activation_strategy): - logger1 = logbook.Logger('Logger1') - logger2 = logbook.Logger('Logger2') + logger1 = logbook.Logger("Logger1") + logger2 = logbook.Logger("Logger2") handler = logbook.TestHandler() outer_handler = logbook.TestHandler() def only_1(record, handler): return record.dispatcher is logger1 + handler.filter = only_1 with activation_strategy(outer_handler): with activation_strategy(handler): - logger1.warn('foo') - logger2.warn('bar') + logger1.warn("foo") + logger2.warn("bar") - assert handler.has_warning('foo', channel='Logger1') - assert (not handler.has_warning('bar', channel='Logger2')) - assert (not outer_handler.has_warning('foo', channel='Logger1')) - assert outer_handler.has_warning('bar', channel='Logger2') + assert handler.has_warning("foo", channel="Logger1") + assert not handler.has_warning("bar", channel="Logger2") + assert not outer_handler.has_warning("foo", channel="Logger1") + assert outer_handler.has_warning("bar", channel="Logger2") def test_different_context_pushing(activation_strategy): h1 = logbook.TestHandler(level=logbook.DEBUG) h2 = logbook.TestHandler(level=logbook.INFO) h3 = logbook.TestHandler(level=logbook.WARNING) - logger = logbook.Logger('Testing') + logger = logbook.Logger("Testing") with activation_strategy(h1): with activation_strategy(h2): with activation_strategy(h3): - logger.warn('Wuuu') - logger.info('still awesome') - logger.debug('puzzled') + logger.warn("Wuuu") + logger.info("still awesome") + logger.debug("puzzled") - assert h1.has_debug('puzzled') - assert h2.has_info('still awesome') - assert h3.has_warning('Wuuu') + assert h1.has_debug("puzzled") + assert 
h2.has_info("still awesome") + assert h3.has_warning("Wuuu") for handler in h1, h2, h3: assert len(handler.records) == 1 def test_default_handlers(logger): with capturing_stderr_context() as stream: - logger.warn('Aha!') + logger.warn("Aha!") captured = stream.getvalue() - assert 'WARNING: testlogger: Aha!' in captured + assert "WARNING: testlogger: Aha!" in captured diff --git a/tests/test_helpers.py b/tests/test_helpers.py index eedbcf4..04d627c 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -10,34 +10,43 @@ def test_jsonhelper(): class Bogus: def __str__(self): - return 'bogus' - - rv = to_safe_json([ + return "bogus" + + rv = to_safe_json( + [ + None, + "foo", + u("jƤger"), + 1, + datetime(2000, 1, 1), + {"jƤger1": 1, u("jƤger2"): 2, Bogus(): 3, "invalid": object()}, + object(), # invalid + ] + ) + + assert rv == [ None, - 'foo', - u('jƤger'), + u("foo"), + u("jƤger"), 1, - datetime(2000, 1, 1), - {'jƤger1': 1, u('jƤger2'): 2, Bogus(): 3, 'invalid': object()}, - object() # invalid - ]) - - assert rv == [None, u('foo'), u('jƤger'), 1, '2000-01-01T00:00:00Z', - {u('jƤger1'): 1, u('jƤger2'): 2, u('bogus'): 3, - u('invalid'): None}, None] + "2000-01-01T00:00:00Z", + {u("jƤger1"): 1, u("jƤger2"): 2, u("bogus"): 3, u("invalid"): None}, + None, + ] def test_datehelpers(): from logbook.helpers import format_iso8601, parse_iso8601 + now = datetime.now() rv = format_iso8601() assert rv[:4] == str(now.year) with pytest.raises(ValueError): - parse_iso8601('foo') - v = parse_iso8601('2000-01-01T00:00:00.12Z') + parse_iso8601("foo") + v = parse_iso8601("2000-01-01T00:00:00.12Z") assert v.microsecond == 120000 - v = parse_iso8601('2000-01-01T12:00:00+01:00') + v = parse_iso8601("2000-01-01T12:00:00+01:00") assert v.hour == 11 - v = parse_iso8601('2000-01-01T12:00:00-01:00') + v = parse_iso8601("2000-01-01T12:00:00-01:00") assert v.hour == 13 diff --git a/tests/test_log_record.py b/tests/test_log_record.py index 1c52fe6..05f5a8d 100644 --- 
a/tests/test_log_record.py +++ b/tests/test_log_record.py @@ -8,38 +8,38 @@ def test_exc_info_when_no_exceptions_exist(logger): with capturing_stderr_context() as captured: with logbook.StreamHandler(sys.stderr): - logger.debug('message', exc_info=True) - assert 'Traceback' not in captured.getvalue() + logger.debug("message", exc_info=True) + assert "Traceback" not in captured.getvalue() def test_exc_info_false(): with logbook.handlers.TestHandler() as handler: - logbook.debug('message here', exc_info=False) + logbook.debug("message here", exc_info=False) [record] = handler.records assert not record.formatted_exception def test_extradict(active_handler, logger): - logger.warn('Test warning') + logger.warn("Test warning") record = active_handler.records[0] - record.extra['existing'] = 'foo' - assert record.extra['nonexisting'] == '' - assert record.extra['existing'] == 'foo' + record.extra["existing"] = "foo" + assert record.extra["nonexisting"] == "" + assert record.extra["existing"] == "foo" def test_calling_frame(active_handler, logger): - logger.warn('test') + logger.warn("test") assert active_handler.records[0].calling_frame == sys._getframe() def test_frame_correction(active_handler, logger): def inner(): - logger.warn('test', frame_correction=+1) + logger.warn("test", frame_correction=+1) inner() assert active_handler.records[0].calling_frame == sys._getframe() def test_dispatcher(active_handler, logger): - logger.warn('Logbook is too awesome for stdlib') + logger.warn("Logbook is too awesome for stdlib") assert active_handler.records[0].dispatcher == logger diff --git a/tests/test_logbook.py b/tests/test_logbook.py index 8ea93f7..b5301cb 100644 --- a/tests/test_logbook.py +++ b/tests/test_logbook.py @@ -5,24 +5,24 @@ def test_global_functions(activation_strategy): with activation_strategy(logbook.TestHandler()) as handler: - logbook.debug('a debug message') - logbook.info('an info message') - logbook.warn('warning part 1') - logbook.warning('warning part 2') 
- logbook.notice('notice') - logbook.error('an error') - logbook.critical('pretty critical') - logbook.log(logbook.CRITICAL, 'critical too') + logbook.debug("a debug message") + logbook.info("an info message") + logbook.warn("warning part 1") + logbook.warning("warning part 2") + logbook.notice("notice") + logbook.error("an error") + logbook.critical("pretty critical") + logbook.log(logbook.CRITICAL, "critical too") - assert handler.has_debug('a debug message') - assert handler.has_info('an info message') - assert handler.has_warning('warning part 1') - assert handler.has_warning('warning part 2') - assert handler.has_notice('notice') - assert handler.has_error('an error') - assert handler.has_critical('pretty critical') - assert handler.has_critical('critical too') - assert handler.records[0].channel == 'Generic' + assert handler.has_debug("a debug message") + assert handler.has_info("an info message") + assert handler.has_warning("warning part 1") + assert handler.has_warning("warning part 2") + assert handler.has_notice("notice") + assert handler.has_error("an error") + assert handler.has_critical("pretty critical") + assert handler.has_critical("critical too") + assert handler.records[0].channel == "Generic" assert handler.records[0].dispatcher is None @@ -30,4 +30,4 @@ def test_level_lookup_failures(): with pytest.raises(LookupError): logbook.get_level_name(37) with pytest.raises(LookupError): - logbook.lookup_level('FOO') + logbook.lookup_level("FOO") diff --git a/tests/test_logger.py b/tests/test_logger.py index 908d653..536f808 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -5,11 +5,11 @@ def test_level_properties(logger): assert logger.level == logbook.NOTSET - assert logger.level_name == 'NOTSET' - logger.level_name = 'WARNING' + assert logger.level_name == "NOTSET" + logger.level_name = "WARNING" assert logger.level == logbook.WARNING logger.level = logbook.ERROR - assert logger.level_name == 'ERROR' + assert logger.level_name == "ERROR" 
def test_reflected_properties(logger): @@ -18,14 +18,14 @@ def test_reflected_properties(logger): assert logger.group == group group.level = logbook.ERROR assert logger.level == logbook.ERROR - assert logger.level_name == 'ERROR' + assert logger.level_name == "ERROR" group.level = logbook.WARNING assert logger.level == logbook.WARNING - assert logger.level_name == 'WARNING' + assert logger.level_name == "WARNING" logger.level = logbook.CRITICAL group.level = logbook.DEBUG assert logger.level == logbook.CRITICAL - assert logger.level_name == 'CRITICAL' + assert logger.level_name == "CRITICAL" group.remove_logger(logger) assert logger.group is None diff --git a/tests/test_logging_api.py b/tests/test_logging_api.py index e2dd3ec..5a864cb 100644 --- a/tests/test_logging_api.py +++ b/tests/test_logging_api.py @@ -8,11 +8,12 @@ def test_basic_logging(active_handler, logger): - logger.warn('This is a warning. Nice hah?') + logger.warn("This is a warning. Nice hah?") - assert active_handler.has_warning('This is a warning. Nice hah?') + assert active_handler.has_warning("This is a warning. Nice hah?") assert active_handler.formatted_records == [ - '[WARNING] testlogger: This is a warning. Nice hah?'] + "[WARNING] testlogger: This is a warning. Nice hah?" 
+ ] def test_exception_catching(active_handler, logger): @@ -24,32 +25,29 @@ def test_exception_catching(active_handler, logger): try: 1 / 0 except Exception: - logger.exception('Awesome') - assert active_handler.has_error('Uncaught exception occurred') - assert active_handler.has_error('Awesome') + logger.exception("Awesome") + assert active_handler.has_error("Uncaught exception occurred") + assert active_handler.has_error("Awesome") assert active_handler.records[0].exc_info is not None - assert '1 / 0' in active_handler.records[0].formatted_exception + assert "1 / 0" in active_handler.records[0].formatted_exception def test_exception_catching_with_unicode(): - """ See https://github.com/getlogbook/logbook/issues/104 - """ + """See https://github.com/getlogbook/logbook/issues/104""" try: - raise Exception(u('\u202a test \u202c')) + raise Exception(u("\u202a test \u202c")) except: - r = logbook.LogRecord('channel', 'DEBUG', 'test', - exc_info=sys.exc_info()) + r = logbook.LogRecord("channel", "DEBUG", "test", exc_info=sys.exc_info()) r.exception_message -@pytest.mark.parametrize('as_tuple', [True, False]) +@pytest.mark.parametrize("as_tuple", [True, False]) def test_exc_info(as_tuple, logger, active_handler): try: 1 / 0 except Exception: exc_info = sys.exc_info() - logger.info("Exception caught", - exc_info=exc_info if as_tuple else True) + logger.info("Exception caught", exc_info=exc_info if as_tuple else True) assert active_handler.records[0].exc_info is not None assert active_handler.records[0].exc_info == exc_info @@ -65,7 +63,7 @@ def test_to_dict(logger, active_handler): record.close() imported = logbook.LogRecord.from_dict(exported) for key, value in iteritems(record.__dict__): - if key[0] == '_': + if key[0] == "_": continue assert value == getattr(imported, key) @@ -83,7 +81,7 @@ def test_pickle(active_handler, logger): exported = pickle.dumps(record, p) imported = pickle.loads(exported) for key, value in iteritems(record.__dict__): - if key[0] == '_': + 
if key[0] == "_": continue imported_value = getattr(imported, key) if isinstance(value, ZeroDivisionError): diff --git a/tests/test_logging_compat.py b/tests/test_logging_compat.py index 37e5640..c371c25 100644 --- a/tests/test_logging_compat.py +++ b/tests/test_logging_compat.py @@ -14,55 +14,56 @@ __file_without_pyc__ = __file_without_pyc__[:-1] -@pytest.mark.parametrize('set_root_logger_level', [True, False]) +@pytest.mark.parametrize("set_root_logger_level", [True, False]) def test_basic_compat(request, set_root_logger_level): import logging from logbook.compat import redirected_logging # mimic the default logging setting - request.addfinalizer(functools.partial( - logging.root.setLevel, logging.root.level)) + request.addfinalizer(functools.partial(logging.root.setLevel, logging.root.level)) logging.root.setLevel(logging.WARNING) - name = 'test_logbook-%d' % randrange(1 << 32) + name = "test_logbook-%d" % randrange(1 << 32) logger = logging.getLogger(name) with logbook.TestHandler(bubble=True) as handler: with capturing_stderr_context() as captured: with redirected_logging(set_root_logger_level): - logger.debug('This is from the old system') - logger.info('This is from the old system') - logger.warning('This is from the old %s', 'system') - logger.error('This is from the old system') - logger.critical('This is from the old system') - logger.error('This is a %(what)s %(where)s', {'what': 'mapping', 'where': 'test'}) - assert ('WARNING: %s: This is from the old system' % - name) in captured.getvalue() - assert ('ERROR: %s: This is a mapping test' % - name) in captured.getvalue() + logger.debug("This is from the old system") + logger.info("This is from the old system") + logger.warning("This is from the old %s", "system") + logger.error("This is from the old system") + logger.critical("This is from the old system") + logger.error( + "This is a %(what)s %(where)s", {"what": "mapping", "where": "test"} + ) + assert ( + "WARNING: %s: This is from the old system" % 
name + ) in captured.getvalue() + assert ("ERROR: %s: This is a mapping test" % name) in captured.getvalue() if set_root_logger_level: assert handler.records[0].level == logbook.DEBUG else: assert handler.records[0].level == logbook.WARNING - assert handler.records[0].msg == 'This is from the old %s' + assert handler.records[0].msg == "This is from the old %s" def test_redirect_logbook(): import logging + out = StringIO() logger = logging.getLogger() - logbook_logger = logbook.Logger('testlogger') + logbook_logger = logbook.Logger("testlogger") old_handlers = logger.handlers[:] handler = logging.StreamHandler(out) - handler.setFormatter(logging.Formatter( - '%(name)s:%(levelname)s:%(message)s')) + handler.setFormatter(logging.Formatter("%(name)s:%(levelname)s:%(message)s")) logger.handlers[:] = [handler] try: with logbook.compat.LoggingHandler(): logbook_logger.warn("This goes to logging") - pieces = out.getvalue().strip().split(':') - assert pieces == ['testlogger', 'WARNING', 'This goes to logging'] + pieces = out.getvalue().strip().split(":") + assert pieces == ["testlogger", "WARNING", "This goes to logging"] finally: logger.handlers[:] = old_handlers @@ -74,17 +75,18 @@ def test_redirect_logbook(): def test_warning_redirections(): from logbook.compat import redirected_warnings + with logbook.TestHandler() as handler: redirector = redirected_warnings() redirector.start() try: from warnings import resetwarnings, warn + resetwarnings() - warn(RuntimeWarning('Testing' + str(next(test_warning_redirections_i)))) + warn(RuntimeWarning("Testing" + str(next(test_warning_redirections_i)))) finally: redirector.end() assert len(handler.records) == 1 - assert handler.formatted_records[0].startswith( - '[WARNING] RuntimeWarning: Testing') + assert handler.formatted_records[0].startswith("[WARNING] RuntimeWarning: Testing") assert __file_without_pyc__ in handler.records[0].filename diff --git a/tests/test_logging_times.py b/tests/test_logging_times.py index 844a488..006572b 
100644 --- a/tests/test_logging_times.py +++ b/tests/test_logging_times.py @@ -11,35 +11,38 @@ def test_timedate_format(activation_strategy, logger): """ tests the logbook.set_datetime_format() function """ - FORMAT_STRING = '{record.time:%H:%M:%S.%f} {record.message}' + FORMAT_STRING = "{record.time:%H:%M:%S.%f} {record.message}" handler = logbook.TestHandler(format_string=FORMAT_STRING) with activation_strategy(handler): - logbook.set_datetime_format('utc') + logbook.set_datetime_format("utc") try: - logger.warn('This is a warning.') + logger.warn("This is a warning.") time_utc = handler.records[0].time - logbook.set_datetime_format('local') - logger.warn('This is a warning.') + logbook.set_datetime_format("local") + logger.warn("This is a warning.") time_local = handler.records[1].time finally: # put back the default time factory - logbook.set_datetime_format('utc') + logbook.set_datetime_format("utc") # get the expected difference between local and utc time t1 = datetime.now() t2 = datetime.utcnow() - tz_minutes_diff = get_total_delta_seconds(t1 - t2)/60.0 + tz_minutes_diff = get_total_delta_seconds(t1 - t2) / 60.0 if abs(tz_minutes_diff) < 1: - pytest.skip('Cannot test utc/localtime differences ' - 'if they vary by less than one minute...') + pytest.skip( + "Cannot test utc/localtime differences " + "if they vary by less than one minute..." 
+ ) # get the difference between LogRecord local and utc times - logbook_minutes_diff = get_total_delta_seconds(time_local - time_utc)/60.0 + logbook_minutes_diff = get_total_delta_seconds(time_local - time_utc) / 60.0 assert abs(logbook_minutes_diff) > 1, ( - 'Localtime does not differ from UTC by more than 1 ' - 'minute (Local: %s, UTC: %s)' % (time_local, time_utc)) + "Localtime does not differ from UTC by more than 1 " + "minute (Local: %s, UTC: %s)" % (time_local, time_utc) + ) ratio = logbook_minutes_diff / tz_minutes_diff @@ -51,11 +54,14 @@ def test_tz_aware(activation_strategy, logger): """ tests logbook.set_datetime_format() with a time zone aware time factory """ + class utc(tzinfo): def tzname(self, dt): - return 'UTC' + return "UTC" + def utcoffset(self, dt): return timedelta(seconds=0) + def dst(self, dt): return timedelta(seconds=0) @@ -64,16 +70,16 @@ def dst(self, dt): def utc_tz(): return datetime.now(tz=utc) - FORMAT_STRING = '{record.time:%H:%M:%S.%f%z} {record.message}' + FORMAT_STRING = "{record.time:%H:%M:%S.%f%z} {record.message}" handler = logbook.TestHandler(format_string=FORMAT_STRING) with activation_strategy(handler): logbook.set_datetime_format(utc_tz) try: - logger.warn('this is a warning.') + logger.warn("this is a warning.") record = handler.records[0] finally: # put back the default time factory - logbook.set_datetime_format('utc') + logbook.set_datetime_format("utc") assert record.time.tzinfo is not None @@ -82,6 +88,7 @@ def test_invalid_time_factory(): """ tests logbook.set_datetime_format() with an invalid time factory callable """ + def invalid_factory(): return False @@ -90,6 +97,6 @@ def invalid_factory(): logbook.set_datetime_format(invalid_factory) finally: # put back the default time factory - logbook.set_datetime_format('utc') + logbook.set_datetime_format("utc") - assert 'Invalid callable value' in str(e.value) + assert "Invalid callable value" in str(e.value) diff --git a/tests/test_mail_handler.py 
b/tests/test_mail_handler.py index f55e77a..41ee507 100644 --- a/tests/test_mail_handler.py +++ b/tests/test_mail_handler.py @@ -13,20 +13,20 @@ from unittest.mock import Mock, call, patch __file_without_pyc__ = __file__ -if __file_without_pyc__.endswith('.pyc'): +if __file_without_pyc__.endswith(".pyc"): __file_without_pyc__ = __file_without_pyc__[:-1] def test_mail_handler(activation_strategy, logger): - subject = u('\xf8nicode') + subject = u("\xf8nicode") handler = make_fake_mail_handler(subject=subject) with capturing_stderr_context() as fallback: with activation_strategy(handler): - logger.warn('This is not mailed') + logger.warn("This is not mailed") try: 1 / 0 except Exception: - logger.exception(u('Viva la Espa\xf1a')) + logger.exception(u("Viva la Espa\xf1a")) if not handler.mails: # if sending the mail failed, the reason should be on stderr @@ -34,195 +34,207 @@ def test_mail_handler(activation_strategy, logger): assert len(handler.mails) == 1 sender, receivers, mail = handler.mails[0] - mail = mail.replace('\r', '') + mail = mail.replace("\r", "") assert sender == handler.from_addr - assert '=?utf-8?q?=C3=B8nicode?=' in mail - header, data = mail.split('\n\n', 1) - if 'Content-Transfer-Encoding: base64' in header: - data = base64.b64decode(data).decode('utf-8') - assert re.search(r'Message type:\s+ERROR', data) - assert re.search(r'Location:.*%s' % - re.escape(__file_without_pyc__), data) - assert re.search(r'Module:\s+%s' % __name__, data) - assert re.search(r'Function:\s+test_mail_handler', data) - body = u('Viva la Espa\xf1a') + assert "=?utf-8?q?=C3=B8nicode?=" in mail + header, data = mail.split("\n\n", 1) + if "Content-Transfer-Encoding: base64" in header: + data = base64.b64decode(data).decode("utf-8") + assert re.search(r"Message type:\s+ERROR", data) + assert re.search(r"Location:.*%s" % re.escape(__file_without_pyc__), data) + assert re.search(r"Module:\s+%s" % __name__, data) + assert re.search(r"Function:\s+test_mail_handler", data) + body = 
u("Viva la Espa\xf1a") if sys.version_info < (3, 0): - body = body.encode('utf-8') + body = body.encode("utf-8") assert body in data - assert '\nTraceback (most' in data - assert '1 / 0' in data - assert 'This is not mailed' in fallback.getvalue() + assert "\nTraceback (most" in data + assert "1 / 0" in data + assert "This is not mailed" in fallback.getvalue() def test_mail_handler_batching(activation_strategy, logger): mail_handler = make_fake_mail_handler() handler = logbook.FingersCrossedHandler(mail_handler, reset=True) with activation_strategy(handler): - logger.warn('Testing') - logger.debug('Even more') - logger.error('And this triggers it') - logger.info('Aha') - logger.error('And this triggers it again!') + logger.warn("Testing") + logger.debug("Even more") + logger.error("And this triggers it") + logger.info("Aha") + logger.error("And this triggers it again!") assert len(mail_handler.mails) == 2 mail = mail_handler.mails[0][2] - pieces = mail.split('Log records that led up to this one:') + pieces = mail.split("Log records that led up to this one:") assert len(pieces) == 2 body, rest = pieces - rest = rest.replace('\r', '') + rest = rest.replace("\r", "") - assert re.search(r'Message type:\s+ERROR', body) - assert re.search(r'Module:\s+%s' % __name__, body) - assert re.search(r'Function:\s+test_mail_handler_batching', body) + assert re.search(r"Message type:\s+ERROR", body) + assert re.search(r"Module:\s+%s" % __name__, body) + assert re.search(r"Function:\s+test_mail_handler_batching", body) - related = rest.strip().split('\n\n') + related = rest.strip().split("\n\n") assert len(related) == 2 - assert re.search(r'Message type:\s+WARNING', related[0]) - assert re.search(r'Message type:\s+DEBUG', related[1]) + assert re.search(r"Message type:\s+WARNING", related[0]) + assert re.search(r"Message type:\s+DEBUG", related[1]) - assert 'And this triggers it again' in mail_handler.mails[1][2] + assert "And this triggers it again" in mail_handler.mails[1][2] def 
test_group_handler_mail_combo(activation_strategy, logger): mail_handler = make_fake_mail_handler(level=logbook.DEBUG) handler = logbook.GroupHandler(mail_handler) with activation_strategy(handler): - logger.error('The other way round') - logger.warn('Testing') - logger.debug('Even more') + logger.error("The other way round") + logger.warn("Testing") + logger.debug("Even more") assert mail_handler.mails == [] assert len(mail_handler.mails) == 1 mail = mail_handler.mails[0][2] - pieces = mail.split('Other log records in the same group:') + pieces = mail.split("Other log records in the same group:") assert len(pieces) == 2 body, rest = pieces - rest = rest.replace('\r', '') + rest = rest.replace("\r", "") - assert re.search(r'Message type:\s+ERROR', body) - assert re.search(r'Module:\s+' + __name__, body) - assert re.search(r'Function:\s+test_group_handler_mail_combo', body) + assert re.search(r"Message type:\s+ERROR", body) + assert re.search(r"Module:\s+" + __name__, body) + assert re.search(r"Function:\s+test_group_handler_mail_combo", body) - related = rest.strip().split('\n\n') + related = rest.strip().split("\n\n") assert len(related) == 2 - assert re.search(r'Message type:\s+WARNING', related[0]) - assert re.search(r'Message type:\s+DEBUG', related[1]) + assert re.search(r"Message type:\s+WARNING", related[0]) + assert re.search(r"Message type:\s+DEBUG", related[1]) def test_mail_handler_arguments(): - with patch('smtplib.SMTP', autospec=True) as mock_smtp: - + with patch("smtplib.SMTP", autospec=True) as mock_smtp: # Test the mail handler with supported arguments before changes to # secure, credentials, and starttls mail_handler = logbook.MailHandler( - from_addr='from@example.com', - recipients='to@example.com', - server_addr=('server.example.com', 465), - credentials=('username', 'password'), - secure=('keyfile', 'certfile')) + from_addr="from@example.com", + recipients="to@example.com", + server_addr=("server.example.com", 465), + credentials=("username", 
"password"), + secure=("keyfile", "certfile"), + ) mail_handler.get_connection() - assert mock_smtp.call_args == call('server.example.com', 465) + assert mock_smtp.call_args == call("server.example.com", 465) assert mock_smtp.method_calls[1] == call().starttls( - keyfile='keyfile', certfile='certfile') - assert mock_smtp.method_calls[3] == call().login('username', 'password') + keyfile="keyfile", certfile="certfile" + ) + assert mock_smtp.method_calls[3] == call().login("username", "password") # Test secure=() mail_handler = logbook.MailHandler( - from_addr='from@example.com', - recipients='to@example.com', - server_addr=('server.example.com', 465), - credentials=('username', 'password'), - secure=()) + from_addr="from@example.com", + recipients="to@example.com", + server_addr=("server.example.com", 465), + credentials=("username", "password"), + secure=(), + ) mail_handler.get_connection() - assert mock_smtp.call_args == call('server.example.com', 465) - assert mock_smtp.method_calls[5] == call().starttls( - certfile=None, keyfile=None) - assert mock_smtp.method_calls[7] == call().login('username', 'password') + assert mock_smtp.call_args == call("server.example.com", 465) + assert mock_smtp.method_calls[5] == call().starttls(certfile=None, keyfile=None) + assert mock_smtp.method_calls[7] == call().login("username", "password") # Test implicit port with string server_addr, dictionary credentials, # dictionary secure. 
mail_handler = logbook.MailHandler( - from_addr='from@example.com', - recipients='to@example.com', - server_addr='server.example.com', - credentials={'user': 'username', 'password': 'password'}, - secure={'certfile': 'certfile2', 'keyfile': 'keyfile2'}) + from_addr="from@example.com", + recipients="to@example.com", + server_addr="server.example.com", + credentials={"user": "username", "password": "password"}, + secure={"certfile": "certfile2", "keyfile": "keyfile2"}, + ) mail_handler.get_connection() - assert mock_smtp.call_args == call('server.example.com', 465) + assert mock_smtp.call_args == call("server.example.com", 465) assert mock_smtp.method_calls[9] == call().starttls( - certfile='certfile2', keyfile='keyfile2') + certfile="certfile2", keyfile="keyfile2" + ) assert mock_smtp.method_calls[11] == call().login( - user='username', password='password') + user="username", password="password" + ) # Test secure=True mail_handler = logbook.MailHandler( - from_addr='from@example.com', - recipients='to@example.com', - server_addr=('server.example.com', 465), - credentials=('username', 'password'), - secure=True) + from_addr="from@example.com", + recipients="to@example.com", + server_addr=("server.example.com", 465), + credentials=("username", "password"), + secure=True, + ) mail_handler.get_connection() - assert mock_smtp.call_args == call('server.example.com', 465) + assert mock_smtp.call_args == call("server.example.com", 465) assert mock_smtp.method_calls[13] == call().starttls( - certfile=None, keyfile=None) - assert mock_smtp.method_calls[15] == call().login('username', 'password') + certfile=None, keyfile=None + ) + assert mock_smtp.method_calls[15] == call().login("username", "password") assert len(mock_smtp.method_calls) == 16 # Test secure=False mail_handler = logbook.MailHandler( - from_addr='from@example.com', - recipients='to@example.com', - server_addr=('server.example.com', 465), - credentials=('username', 'password'), - secure=False) + 
from_addr="from@example.com", + recipients="to@example.com", + server_addr=("server.example.com", 465), + credentials=("username", "password"), + secure=False, + ) mail_handler.get_connection() # starttls not called because we check len of method_calls before and # after this test. - assert mock_smtp.call_args == call('server.example.com', 465) - assert mock_smtp.method_calls[16] == call().login('username', 'password') + assert mock_smtp.call_args == call("server.example.com", 465) + assert mock_smtp.method_calls[16] == call().login("username", "password") assert len(mock_smtp.method_calls) == 17 - with patch('smtplib.SMTP_SSL', autospec=True) as mock_smtp_ssl: + with patch("smtplib.SMTP_SSL", autospec=True) as mock_smtp_ssl: # Test starttls=False mail_handler = logbook.MailHandler( - from_addr='from@example.com', - recipients='to@example.com', - server_addr='server.example.com', - credentials={'user': 'username', 'password': 'password'}, - secure={'certfile': 'certfile', 'keyfile': 'keyfile'}, - starttls=False) + from_addr="from@example.com", + recipients="to@example.com", + server_addr="server.example.com", + credentials={"user": "username", "password": "password"}, + secure={"certfile": "certfile", "keyfile": "keyfile"}, + starttls=False, + ) mail_handler.get_connection() assert mock_smtp_ssl.call_args == call( - 'server.example.com', 465, keyfile='keyfile', certfile='certfile') + "server.example.com", 465, keyfile="keyfile", certfile="certfile" + ) assert mock_smtp_ssl.method_calls[0] == call().login( - user='username', password='password') + user="username", password="password" + ) # Test starttls=False with secure=True mail_handler = logbook.MailHandler( - from_addr='from@example.com', - recipients='to@example.com', - server_addr='server.example.com', - credentials={'user': 'username', 'password': 'password'}, + from_addr="from@example.com", + recipients="to@example.com", + server_addr="server.example.com", + credentials={"user": "username", "password": 
"password"}, secure=True, - starttls=False) + starttls=False, + ) mail_handler.get_connection() assert mock_smtp_ssl.call_args == call( - 'server.example.com', 465, keyfile=None, certfile=None) + "server.example.com", 465, keyfile=None, certfile=None + ) assert mock_smtp_ssl.method_calls[1] == call().login( - user='username', password='password') + user="username", password="password" + ) diff --git a/tests/test_more.py b/tests/test_more.py index d58758b..8a3a102 100644 --- a/tests/test_more.py +++ b/tests/test_more.py @@ -8,24 +8,25 @@ from .utils import capturing_stderr_context, missing, require_module -@require_module('jinja2') +@require_module("jinja2") def test_jinja_formatter(logger): from logbook.more import JinjaFormatter - fmter = JinjaFormatter('{{ record.channel }}/{{ record.level_name }}') + + fmter = JinjaFormatter("{{ record.channel }}/{{ record.level_name }}") handler = logbook.TestHandler() handler.formatter = fmter with handler: - logger.info('info') - assert 'testlogger/INFO' in handler.formatted_records + logger.info("info") + assert "testlogger/INFO" in handler.formatted_records -@missing('jinja2') +@missing("jinja2") def test_missing_jinja2(): from logbook.more import JinjaFormatter # check the RuntimeError is raised with pytest.raises(RuntimeError): - JinjaFormatter('dummy') + JinjaFormatter("dummy") def test_colorizing_support(logger): @@ -40,64 +41,67 @@ def __init__(self, *args, **kwargs): def stream(self): return self._obj_stream - with TestColorizingHandler(format_string='{record.message}') as handler: + with TestColorizingHandler(format_string="{record.message}") as handler: handler.force_color() - logger.error('An error') - logger.warn('A warning') - logger.debug('A debug message') - lines = handler.stream.getvalue().rstrip('\n').splitlines() + logger.error("An error") + logger.warn("A warning") + logger.debug("A debug message") + lines = handler.stream.getvalue().rstrip("\n").splitlines() assert lines == [ - '\x1b[31;01mAn 
error\x1b[39;49;00m', - '\x1b[33;01mA warning\x1b[39;49;00m', - '\x1b[37mA debug message\x1b[39;49;00m'] + "\x1b[31;01mAn error\x1b[39;49;00m", + "\x1b[33;01mA warning\x1b[39;49;00m", + "\x1b[37mA debug message\x1b[39;49;00m", + ] - with TestColorizingHandler(format_string='{record.message}') as handler: + with TestColorizingHandler(format_string="{record.message}") as handler: handler.forbid_color() - logger.error('An error') - logger.warn('A warning') - logger.debug('A debug message') - lines = handler.stream.getvalue().rstrip('\n').splitlines() - assert lines == ['An error', 'A warning', 'A debug message'] - + logger.error("An error") + logger.warn("A warning") + logger.debug("A debug message") + lines = handler.stream.getvalue().rstrip("\n").splitlines() + assert lines == ["An error", "A warning", "A debug message"] def test_tagged(default_handler): from logbook.more import TaggingHandler, TaggingLogger + stream = StringIO() second_handler = logbook.StreamHandler(stream) - logger = TaggingLogger('name', ['cmd']) - handler = TaggingHandler(dict( - info=default_handler, - cmd=second_handler, - both=[default_handler, second_handler], - )) + logger = TaggingLogger("name", ["cmd"]) + handler = TaggingHandler( + dict( + info=default_handler, + cmd=second_handler, + both=[default_handler, second_handler], + ) + ) handler.bubble = False with handler: with capturing_stderr_context() as captured: - logger.log('info', 'info message') - logger.log('both', 'all message') - logger.cmd('cmd message') + logger.log("info", "info message") + logger.log("both", "all message") + logger.cmd("cmd message") stderr = captured.getvalue() - assert 'info message' in stderr - assert 'all message' in stderr - assert 'cmd message' not in stderr + assert "info message" in stderr + assert "all message" in stderr + assert "cmd message" not in stderr stringio = stream.getvalue() - assert 'info message' not in stringio - assert 'all message' in stringio - assert 'cmd message' in stringio + 
assert "info message" not in stringio + assert "all message" in stringio + assert "cmd message" in stringio def test_tagging_logger(default_handler): from logbook import StderrHandler from logbook.more import TaggingLogger - logger = TaggingLogger('tagged', ['a', 'b']) + logger = TaggingLogger("tagged", ["a", "b"]) handler = StderrHandler(format_string="{record.msg}|{record.extra[tags]}") with handler: @@ -115,19 +119,28 @@ def test_tagging_logger(default_handler): def test_external_application_handler(tmpdir, logger): from logbook.more import ExternalApplicationHandler as Handler - fn = tmpdir.join('tempfile') - handler = Handler([sys.executable, '-c', r'''if 1: + + fn = tmpdir.join("tempfile") + handler = Handler( + [ + sys.executable, + "-c", + r"""if 1: f = open({tempfile}, 'w') try: f.write('{{record.message}}\n') finally: f.close() - '''.format(tempfile=repr(str(fn)))]) + """.format( + tempfile=repr(str(fn)) + ), + ] + ) with handler: - logger.error('this is a really bad idea') + logger.error("this is a really bad idea") with fn.open() as rf: contents = rf.read().strip() - assert contents == 'this is a really bad idea' + assert contents == "this is a really bad idea" def test_exception_handler(logger): @@ -135,39 +148,43 @@ def test_exception_handler(logger): with ExceptionHandler(ValueError): with pytest.raises(ValueError) as caught: - logger.info('here i am') - assert 'INFO: testlogger: here i am' in caught.value.args[0] + logger.info("here i am") + assert "INFO: testlogger: here i am" in caught.value.args[0] def test_exception_handler_specific_level(logger): from logbook.more import ExceptionHandler + with logbook.TestHandler() as test_handler: with pytest.raises(ValueError) as caught: - with ExceptionHandler(ValueError, level='WARNING'): - logger.info('this is irrelevant') - logger.warn('here i am') - assert 'WARNING: testlogger: here i am' in caught.value.args[0] - assert 'this is irrelevant' in test_handler.records[0].message + with 
ExceptionHandler(ValueError, level="WARNING"): + logger.info("this is irrelevant") + logger.warn("here i am") + assert "WARNING: testlogger: here i am" in caught.value.args[0] + assert "this is irrelevant" in test_handler.records[0].message def test_dedup_handler(logger): from logbook.more import DedupHandler + with logbook.TestHandler() as test_handler: with DedupHandler(): - logger.info('foo') - logger.info('bar') - logger.info('foo') + logger.info("foo") + logger.info("bar") + logger.info("foo") assert 2 == len(test_handler.records) - assert 'message repeated 2 times: foo' in test_handler.records[0].message - assert 'message repeated 1 times: bar' in test_handler.records[1].message + assert "message repeated 2 times: foo" in test_handler.records[0].message + assert "message repeated 1 times: bar" in test_handler.records[1].message class TestRiemannHandler: - @require_module("riemann_client") def test_happy_path(self, logger): from logbook.more import RiemannHandler - riemann_handler = RiemannHandler("127.0.0.1", 5555, message_type="test", level=logbook.INFO) + + riemann_handler = RiemannHandler( + "127.0.0.1", 5555, message_type="test", level=logbook.INFO + ) null_handler = logbook.NullHandler() with null_handler.applicationbound(): with riemann_handler: @@ -190,17 +207,21 @@ def test_happy_path(self, logger): @require_module("riemann_client") def test_incorrect_type(self): from logbook.more import RiemannHandler + with pytest.raises(RuntimeError): RiemannHandler("127.0.0.1", 5555, message_type="fancy_type") @require_module("riemann_client") def test_flush(self, logger): from logbook.more import RiemannHandler - riemann_handler = RiemannHandler("127.0.0.1", - 5555, - message_type="test", - flush_threshold=2, - level=logbook.INFO) + + riemann_handler = RiemannHandler( + "127.0.0.1", + 5555, + message_type="test", + flush_threshold=2, + level=logbook.INFO, + ) null_handler = logbook.NullHandler() with null_handler.applicationbound(): with riemann_handler: diff 
--git a/tests/test_nteventlog_handler.py b/tests/test_nteventlog_handler.py index 9d10cc3..2616a7f 100644 --- a/tests/test_nteventlog_handler.py +++ b/tests/test_nteventlog_handler.py @@ -7,11 +7,13 @@ from .utils import require_module -@require_module('win32con') -@require_module('win32evtlog') -@require_module('win32evtlogutil') -@pytest.mark.skipif(os.environ.get('ENABLE_LOGBOOK_NTEVENTLOG_TESTS') is None, - reason="Don't clutter NT Event Log unless enabled.") +@require_module("win32con") +@require_module("win32evtlog") +@require_module("win32evtlogutil") +@pytest.mark.skipif( + os.environ.get("ENABLE_LOGBOOK_NTEVENTLOG_TESTS") is None, + reason="Don't clutter NT Event Log unless enabled.", +) def test_nteventlog_handler(): from win32con import ( EVENTLOG_ERROR_TYPE, @@ -26,12 +28,12 @@ def test_nteventlog_handler(): ) from win32evtlogutil import SafeFormatMessage - logger = logbook.Logger('Test Logger') + logger = logbook.Logger("Test Logger") - with logbook.NTEventLogHandler('Logbook Test Suite'): - logger.info('The info log message.') - logger.warning('The warning log message.') - logger.error('The error log message.') + with logbook.NTEventLogHandler("Logbook Test Suite"): + logger.info("The info log message.") + logger.warning("The warning log message.") + logger.error("The error log message.") def iter_event_log(handle, flags, offset): while True: @@ -40,19 +42,19 @@ def iter_event_log(handle, flags, offset): if not events: break - handle = OpenEventLog(None, 'Application') + handle = OpenEventLog(None, "Application") flags = EVENTLOG_BACKWARDS_READ | EVENTLOG_SEQUENTIAL_READ for event in iter_event_log(handle, flags, 0): source = str(event.SourceName) - if source == 'Logbook Test Suite': - message = SafeFormatMessage(event, 'Application') - if 'Message Level: INFO' in message: - assert 'The info log message' in message + if source == "Logbook Test Suite": + message = SafeFormatMessage(event, "Application") + if "Message Level: INFO" in message: + assert 
"The info log message" in message assert event.EventType == EVENTLOG_INFORMATION_TYPE - if 'Message Level: WARNING' in message: - assert 'The warning log message' in message + if "Message Level: WARNING" in message: + assert "The warning log message" in message assert event.EventType == EVENTLOG_WARNING_TYPE - if 'Message Level: ERROR' in message: - assert 'The error log message' in message + if "Message Level: ERROR" in message: + assert "The error log message" in message assert event.EventType == EVENTLOG_ERROR_TYPE diff --git a/tests/test_null_handler.py b/tests/test_null_handler.py index c84dc7d..ac120cb 100644 --- a/tests/test_null_handler.py +++ b/tests/test_null_handler.py @@ -6,31 +6,33 @@ def test_null_handler(activation_strategy, logger): with capturing_stderr_context() as captured: with activation_strategy(logbook.NullHandler()): - with activation_strategy(logbook.TestHandler(level='ERROR')) as handler: - logger.error('An error') - logger.warn('A warning') - assert captured.getvalue() == '' - assert (not handler.has_warning('A warning')) - assert handler.has_error('An error') + with activation_strategy(logbook.TestHandler(level="ERROR")) as handler: + logger.error("An error") + logger.warn("A warning") + assert captured.getvalue() == "" + assert not handler.has_warning("A warning") + assert handler.has_error("An error") def test_blackhole_setting(activation_strategy): null_handler = logbook.NullHandler() heavy_init = logbook.LogRecord.heavy_init with activation_strategy(null_handler): + def new_heavy_init(self): - raise RuntimeError('should not be triggered') + raise RuntimeError("should not be triggered") + logbook.LogRecord.heavy_init = new_heavy_init try: with activation_strategy(null_handler): - logbook.warn('Awesome') + logbook.warn("Awesome") finally: logbook.LogRecord.heavy_init = heavy_init null_handler.bubble = True with capturing_stderr_context() as captured: - logbook.warning('Not a blockhole') - assert captured.getvalue() != '' + 
logbook.warning("Not a blockhole") + assert captured.getvalue() != "" def test_null_handler_filtering(activation_strategy): @@ -46,5 +48,5 @@ def test_null_handler_filtering(activation_strategy): logger1.warn("1") logger2.warn("2") - assert outer.has_warning('2', channel='2') - assert (not outer.has_warning('1', channel='1')) + assert outer.has_warning("2", channel="2") + assert not outer.has_warning("1", channel="1") diff --git a/tests/test_processors.py b/tests/test_processors.py index 1e8dd29..0aa5532 100644 --- a/tests/test_processors.py +++ b/tests/test_processors.py @@ -7,7 +7,8 @@ def test_handler_filter_after_processor(activation_strategy, logger): handler = make_fake_mail_handler( - format_string=dedent(''' + format_string=dedent( + """ Subject: Application Error for {record.extra[path]} [{record.extra[method]}] Message type: {record.level_name} @@ -21,20 +22,22 @@ def test_handler_filter_after_processor(activation_strategy, logger): Message: {record.message} - ''').lstrip(), - filter=lambda r, h: 'ip' in r.extra, - bubble=False) + """ + ).lstrip(), + filter=lambda r, h: "ip" in r.extra, + bubble=False, + ) class Request: - remote_addr = '127.0.0.1' - method = 'GET' - path = '/index.html' + remote_addr = "127.0.0.1" + method = "GET" + path = "/index.html" def handle_request(request): def inject_extra(record): - record.extra['ip'] = request.remote_addr - record.extra['method'] = request.method - record.extra['path'] = request.path + record.extra["ip"] = request.remote_addr + record.extra["method"] = request.method + record.extra["path"] = request.path processor = logbook.Processor(inject_extra) with activation_strategy(processor): @@ -43,20 +46,21 @@ def inject_extra(record): try: 1 / 0 except Exception: - logger.exception('Exception happened during request') + logger.exception("Exception happened during request") finally: handler.pop_thread() handle_request(Request()) assert len(handler.mails) == 1 mail = handler.mails[0][2] - assert 'Subject: Application 
Error for /index.html [GET]' in mail - assert '1 / 0' in mail + assert "Subject: Application Error for /index.html [GET]" in mail + assert "1 / 0" in mail def test_handler_processors(activation_strategy, logger): handler = make_fake_mail_handler( - format_string=dedent(''' + format_string=dedent( + """ Subject: Application Error for {record.extra[path]} [{record.extra[method]}] Message type: {record.level_name} @@ -70,18 +74,20 @@ def test_handler_processors(activation_strategy, logger): Message: {record.message} - ''').lstrip()) + """ + ).lstrip() + ) class Request: - remote_addr = '127.0.0.1' - method = 'GET' - path = '/index.html' + remote_addr = "127.0.0.1" + method = "GET" + path = "/index.html" def handle_request(request): def inject_extra(record): - record.extra['ip'] = request.remote_addr - record.extra['method'] = request.method - record.extra['path'] = request.path + record.extra["ip"] = request.remote_addr + record.extra["method"] = request.method + record.extra["path"] = request.path processor = logbook.Processor(inject_extra) with activation_strategy(processor): @@ -90,12 +96,12 @@ def inject_extra(record): try: 1 / 0 except Exception: - logger.exception('Exception happened during request') + logger.exception("Exception happened during request") finally: handler.pop_thread() handle_request(Request()) assert len(handler.mails) == 1 mail = handler.mails[0][2] - assert 'Subject: Application Error for /index.html [GET]' in mail - assert '1 / 0' in mail + assert "Subject: Application Error for /index.html [GET]" in mail + assert "1 / 0" in mail diff --git a/tests/test_queues.py b/tests/test_queues.py index 1395ba9..fe0a52b 100644 --- a/tests/test_queues.py +++ b/tests/test_queues.py @@ -9,16 +9,16 @@ from .utils import LETTERS, missing, require_module -REDIS_HOST = os.environ.get('REDIS_HOST', 'localhost') -REDIS_PORT = int(os.environ.get('REDIS_PORT', '6379')) +REDIS_HOST = os.environ.get("REDIS_HOST", "localhost") +REDIS_PORT = 
int(os.environ.get("REDIS_PORT", "6379")) -@require_module('zmq') +@require_module("zmq") def test_zeromq_handler(logger, handlers, subscriber): tests = [ - u('Logging something'), - u('Something with umlauts Ć¤Ć¶Ć¼'), - u('Something else for good measure'), + u("Logging something"), + u("Something with umlauts Ć¤Ć¶Ć¼"), + u("Something else for good measure"), ] for test in tests: for handler in handlers: @@ -29,15 +29,15 @@ def test_zeromq_handler(logger, handlers, subscriber): assert record.channel == logger.name -@require_module('zmq') +@require_module("zmq") def test_zeromq_background_thread(logger, handlers, subscriber): test_handler = logbook.TestHandler() controller = subscriber.dispatch_in_background(test_handler) for handler in handlers: with handler: - logger.warn('This is a warning') - logger.error('This is an error') + logger.warn("This is a warning") + logger.error("This is an error") # stop the controller. This will also stop the loop and join the # background process. Before that we give it a fraction of a second @@ -45,17 +45,18 @@ def test_zeromq_background_thread(logger, handlers, subscriber): time.sleep(0.5) controller.stop() - assert test_handler.has_warning('This is a warning') - assert test_handler.has_error('This is an error') + assert test_handler.has_warning("This is a warning") + assert test_handler.has_error("This is an error") -@missing('zmq') +@missing("zmq") def test_missing_zeromq(): from logbook.queues import ZeroMQHandler, ZeroMQSubscriber + with pytest.raises(RuntimeError): - ZeroMQHandler('tcp://127.0.0.1:42000') + ZeroMQHandler("tcp://127.0.0.1:42000") with pytest.raises(RuntimeError): - ZeroMQSubscriber('tcp://127.0.0.1:42000') + ZeroMQSubscriber("tcp://127.0.0.1:42000") class MultiProcessingHandlerSendBack: @@ -64,21 +65,23 @@ def __init__(self, queue): def __call__(self): from logbook.queues import MultiProcessingHandler + handler = MultiProcessingHandler(self.queue) handler.push_thread() try: - logbook.warn('Hello World') + 
logbook.warn("Hello World") finally: handler.pop_thread() -@require_module('multiprocessing') +@require_module("multiprocessing") def test_multi_processing_handler(): - if os.getenv('APPVEYOR') == 'True': - pytest.skip('Test hangs on AppVeyor CI') + if os.getenv("APPVEYOR") == "True": + pytest.skip("Test hangs on AppVeyor CI") from multiprocessing import Process, Queue from logbook.queues import MultiProcessingSubscriber + queue = Queue(-1) test_handler = logbook.TestHandler() subscriber = MultiProcessingSubscriber(queue) @@ -89,7 +92,7 @@ def test_multi_processing_handler(): with test_handler: subscriber.dispatch_once() - assert test_handler.has_warning('Hello World') + assert test_handler.has_warning("Hello World") class BatchTestHandler(logbook.TestHandler): @@ -109,80 +112,89 @@ def emit_batch(self, records, reason): def test_threaded_wrapper_handler(logger): from logbook.queues import ThreadedWrapperHandler + test_handler = BatchTestHandler() with ThreadedWrapperHandler(test_handler) as handler: - logger.warn('Just testing') - logger.error('More testing') + logger.warn("Just testing") + logger.error("More testing") # give it some time to sync up handler.close() - assert (not handler.controller.running) + assert not handler.controller.running assert len(test_handler.records) == 2 assert len(test_handler.batches) == 2 assert all(len(records) == 1 for records in test_handler.batches) - assert test_handler.has_warning('Just testing') - assert test_handler.has_error('More testing') + assert test_handler.has_warning("Just testing") + assert test_handler.has_error("More testing") def test_threaded_wrapper_handler_emit(): from logbook.queues import ThreadedWrapperHandler + test_handler = BatchTestHandler() with ThreadedWrapperHandler(test_handler) as handler: - lr = logbook.LogRecord('Test Logger', logbook.WARNING, 'Just testing') + lr = logbook.LogRecord("Test Logger", logbook.WARNING, "Just testing") test_handler.emit(lr) - lr = logbook.LogRecord('Test Logger', 
logbook.ERROR, 'More testing') + lr = logbook.LogRecord("Test Logger", logbook.ERROR, "More testing") test_handler.emit(lr) # give it some time to sync up handler.close() - assert (not handler.controller.running) + assert not handler.controller.running assert len(test_handler.records) == 2 assert len(test_handler.batches) == 2 assert all(len(records) == 1 for records in test_handler.batches) - assert test_handler.has_warning('Just testing') - assert test_handler.has_error('More testing') + assert test_handler.has_warning("Just testing") + assert test_handler.has_error("More testing") def test_threaded_wrapper_handler_emit_batched(): from logbook.queues import ThreadedWrapperHandler + test_handler = BatchTestHandler() with ThreadedWrapperHandler(test_handler) as handler: - test_handler.emit_batch([ - logbook.LogRecord('Test Logger', logbook.WARNING, 'Just testing'), - logbook.LogRecord('Test Logger', logbook.ERROR, 'More testing'), - ], 'group') + test_handler.emit_batch( + [ + logbook.LogRecord("Test Logger", logbook.WARNING, "Just testing"), + logbook.LogRecord("Test Logger", logbook.ERROR, "More testing"), + ], + "group", + ) # give it some time to sync up handler.close() - assert (not handler.controller.running) + assert not handler.controller.running assert len(test_handler.records) == 2 assert len(test_handler.batches) == 1 - (records, ) = test_handler.batches + (records,) = test_handler.batches assert len(records) == 2 - assert test_handler.has_warning('Just testing') - assert test_handler.has_error('More testing') + assert test_handler.has_warning("Just testing") + assert test_handler.has_error("More testing") -@require_module('execnet') +@require_module("execnet") def test_execnet_handler(): def run_on_remote(channel): import logbook from logbook.queues import ExecnetChannelHandler + handler = ExecnetChannelHandler(channel) - log = logbook.Logger('Execnet') + log = logbook.Logger("Execnet") handler.push_application() - log.info('Execnet works') + 
log.info("Execnet works") import execnet + gw = execnet.makegateway() channel = gw.remote_exec(run_on_remote) from logbook.queues import ExecnetChannelSubscriber + subscriber = ExecnetChannelSubscriber(channel) record = subscriber.recv() - assert record.msg == 'Execnet works' + assert record.msg == "Execnet works" gw.exit() @@ -193,43 +205,44 @@ def __init__(self, message, queue): def __call__(self): from logbook.queues import MultiProcessingHandler + with MultiProcessingHandler(self.queue): logbook.warn(self.message) -@require_module('multiprocessing') +@require_module("multiprocessing") def test_subscriber_group(): - if os.getenv('APPVEYOR') == 'True': - pytest.skip('Test hangs on AppVeyor CI') + if os.getenv("APPVEYOR") == "True": + pytest.skip("Test hangs on AppVeyor CI") from multiprocessing import Process, Queue from logbook.queues import MultiProcessingSubscriber, SubscriberGroup + a_queue = Queue(-1) b_queue = Queue(-1) - subscriber = SubscriberGroup([ - MultiProcessingSubscriber(a_queue), - MultiProcessingSubscriber(b_queue) - ]) + subscriber = SubscriberGroup( + [MultiProcessingSubscriber(a_queue), MultiProcessingSubscriber(b_queue)] + ) for _ in range(10): - p1 = Process(target=SubscriberGroupSendBack('foo', a_queue)) - p2 = Process(target=SubscriberGroupSendBack('bar', b_queue)) + p1 = Process(target=SubscriberGroupSendBack("foo", a_queue)) + p2 = Process(target=SubscriberGroupSendBack("bar", b_queue)) p1.start() p2.start() p1.join() p2.join() messages = [subscriber.recv().message for i in (1, 2)] - assert sorted(messages) == ['bar', 'foo'] + assert sorted(messages) == ["bar", "foo"] -@require_module('redis') +@require_module("redis") def test_redis_handler(): import redis from logbook.queues import RedisHandler - KEY = f'redis-{os.getpid()}' - FIELDS = ['message', 'host'] + KEY = f"redis-{os.getpid()}" + FIELDS = ["message", "host"] r = redis.Redis(REDIS_HOST, REDIS_PORT, decode_responses=True) redis_handler = RedisHandler(key=KEY, level=logbook.INFO, 
bubble=True) # We don't want output for the tests, so we can wrap everything in a @@ -249,7 +262,7 @@ def test_redis_handler(): assert message.find(LETTERS) # Change the key of the handler and check on redis - KEY = f'test_another_key-{os.getpid()}' + KEY = f"test_another_key-{os.getpid()}" redis_handler.key = KEY with null_handler.applicationbound(): @@ -260,11 +273,12 @@ def test_redis_handler(): assert key == KEY # Check that extra fields are added if specified when creating the handler - FIELDS.append('type') - extra_fields = {'type': 'test'} - del(redis_handler) - redis_handler = RedisHandler(key=KEY, level=logbook.INFO, - extra_fields=extra_fields, bubble=True) + FIELDS.append("type") + extra_fields = {"type": "test"} + del redis_handler + redis_handler = RedisHandler( + key=KEY, level=logbook.INFO, extra_fields=extra_fields, bubble=True + ) with null_handler.applicationbound(): with redis_handler: @@ -273,22 +287,22 @@ def test_redis_handler(): key, message = r.blpop(KEY) for field in FIELDS: assert message.find(field) - assert message.find('test') + assert message.find("test") # And finally, check that fields are correctly added if appended to the # log message - FIELDS.append('more_info') + FIELDS.append("more_info") with null_handler.applicationbound(): with redis_handler: - logbook.info(LETTERS, more_info='This works') + logbook.info(LETTERS, more_info="This works") key, message = r.blpop(KEY) for field in FIELDS: assert message.find(field) - assert message.find('This works') + assert message.find("This works") -@require_module('redis') +@require_module("redis") def test_redis_handler_lpush(): """ Test if lpush stores messages in the right order @@ -297,11 +311,13 @@ def test_redis_handler_lpush(): import redis from logbook.queues import RedisHandler + null_handler = logbook.NullHandler() - KEY = f'lpushed-' - redis_handler = RedisHandler(key=KEY, push_method='lpush', - level=logbook.INFO, bubble=True) + KEY = f"lpushed-" + redis_handler = RedisHandler( 
+ key=KEY, push_method="lpush", level=logbook.INFO, bubble=True + ) with null_handler.applicationbound(): with redis_handler: @@ -317,7 +333,7 @@ def test_redis_handler_lpush(): r.delete(KEY) -@require_module('redis') +@require_module("redis") def test_redis_handler_rpush(): """ Test if rpush stores messages in the right order @@ -326,11 +342,13 @@ def test_redis_handler_rpush(): import redis from logbook.queues import RedisHandler + null_handler = logbook.NullHandler() - KEY = 'rpushed-' + str(os.getpid()) - redis_handler = RedisHandler(key=KEY, push_method='rpush', - level=logbook.INFO, bubble=True) + KEY = "rpushed-" + str(os.getpid()) + redis_handler = RedisHandler( + key=KEY, push_method="rpush", level=logbook.INFO, bubble=True + ) with null_handler.applicationbound(): with redis_handler: @@ -362,12 +380,12 @@ def handlers_subscriber(multi): # Get an unused port tempsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - tempsock.bind(('127.0.0.1', 0)) + tempsock.bind(("127.0.0.1", 0)) host, unused_port = tempsock.getsockname() tempsock.close() # Retrieve the ZeroMQ handler and subscriber - uri = 'tcp://%s:%d' % (host, unused_port) + uri = "tcp://%s:%d" % (host, unused_port) if multi: handlers = [ZeroMQHandler(uri, multi=True) for _ in range(3)] else: diff --git a/tests/test_syslog_handler.py b/tests/test_syslog_handler.py index eb5ef4b..d8cabc7 100644 --- a/tests/test_syslog_handler.py +++ b/tests/test_syslog_handler.py @@ -9,26 +9,26 @@ UNIX_SOCKET = "/tmp/__unixsock_logbook.test" -DELIMITERS = { - socket.AF_INET: '\n' -} +DELIMITERS = {socket.AF_INET: "\n"} TO_TEST = [ - (socket.AF_INET, socket.SOCK_DGRAM, ('127.0.0.1', 0)), - (socket.AF_INET, socket.SOCK_STREAM, ('127.0.0.1', 0)), + (socket.AF_INET, socket.SOCK_DGRAM, ("127.0.0.1", 0)), + (socket.AF_INET, socket.SOCK_STREAM, ("127.0.0.1", 0)), ] -UNIX_SOCKET_AVAILABLE = hasattr(socket, 'AF_UNIX') +UNIX_SOCKET_AVAILABLE = hasattr(socket, "AF_UNIX") if UNIX_SOCKET_AVAILABLE: - DELIMITERS[socket.AF_UNIX] = 
'\x00' + DELIMITERS[socket.AF_UNIX] = "\x00" TO_TEST.append((socket.AF_UNIX, socket.SOCK_DGRAM, UNIX_SOCKET)) @pytest.mark.usefixtures("unix_sock_path") @pytest.mark.parametrize("sock_family,socktype,address", TO_TEST) -@pytest.mark.parametrize("app_name", [None, 'Testing']) -def test_syslog_handler(logger, activation_strategy, sock_family, socktype, address, app_name): +@pytest.mark.parametrize("app_name", [None, "Testing"]) +def test_syslog_handler( + logger, activation_strategy, sock_family, socktype, address, app_name +): delimiter = DELIMITERS[sock_family] with closing(socket.socket(sock_family, socktype)) as inc: inc.bind(address) @@ -39,18 +39,25 @@ def test_syslog_handler(logger, activation_strategy, sock_family, socktype, addr inc.settimeout(1) if UNIX_SOCKET_AVAILABLE and sock_family == socket.AF_UNIX: - expected = (r'^<12>{}testlogger: Syslog is weird{}$'.format(app_name + ':' if app_name else '', delimiter)) + expected = r"^<12>{}testlogger: Syslog is weird{}$".format( + app_name + ":" if app_name else "", delimiter + ) else: - expected = (r'^<12>1 \d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?Z %s %s %d - - %sSyslog is weird%s$' % ( - socket.gethostname(), - app_name if app_name else 'testlogger', - os.getpid(), 'testlogger: ' if app_name else '', - delimiter)) + expected = ( + r"^<12>1 \d{4}-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d+)?Z %s %s %d - - %sSyslog is weird%s$" + % ( + socket.gethostname(), + app_name if app_name else "testlogger", + os.getpid(), + "testlogger: " if app_name else "", + delimiter, + ) + ) handler = logbook.SyslogHandler(app_name, inc.getsockname(), socktype=socktype) with activation_strategy(handler): - logger.warn('Syslog is weird') + logger.warn("Syslog is weird") if socktype == socket.SOCK_STREAM: with closing(inc.accept()[0]) as inc2: @@ -58,9 +65,8 @@ def test_syslog_handler(logger, activation_strategy, sock_family, socktype, addr else: rv = inc.recvfrom(1024)[0] - rv = rv.decode('utf-8') - assert re.match(expected, rv), \ - f'expected 
{expected}, got {rv}' + rv = rv.decode("utf-8") + assert re.match(expected, rv), f"expected {expected}, got {rv}" @pytest.fixture diff --git a/tests/test_test_handler.py b/tests/test_test_handler.py index 4d651fa..fed825f 100644 --- a/tests/test_test_handler.py +++ b/tests/test_test_handler.py @@ -3,46 +3,52 @@ import pytest -@pytest.mark.parametrize("level, method", [ - ("trace", "has_traces"), - ("debug", "has_debugs"), - ("info", "has_infos"), - ("notice", "has_notices"), - ("warning", "has_warnings"), - ("error", "has_errors"), - ("critical", "has_criticals"), -]) +@pytest.mark.parametrize( + "level, method", + [ + ("trace", "has_traces"), + ("debug", "has_debugs"), + ("info", "has_infos"), + ("notice", "has_notices"), + ("warning", "has_warnings"), + ("error", "has_errors"), + ("critical", "has_criticals"), + ], +) def test_has_level(active_handler, logger, level, method): log = getattr(logger, level) - log('Hello World') + log("Hello World") assert getattr(active_handler, method) -@pytest.mark.parametrize("level, method", [ - ("trace", "has_trace"), - ("debug", "has_debug"), - ("info", "has_info"), - ("notice", "has_notice"), - ("warning", "has_warning"), - ("error", "has_error"), - ("critical", "has_critical"), -]) +@pytest.mark.parametrize( + "level, method", + [ + ("trace", "has_trace"), + ("debug", "has_debug"), + ("info", "has_info"), + ("notice", "has_notice"), + ("warning", "has_warning"), + ("error", "has_error"), + ("critical", "has_critical"), + ], +) def test_regex_matching(active_handler, logger, level, method): log = getattr(logger, level) - log('Hello World') + log("Hello World") has_level_method = getattr(active_handler, method) - assert has_level_method(re.compile('^Hello')) - assert (not has_level_method(re.compile('world$'))) - assert (not has_level_method('^Hello World')) + assert has_level_method(re.compile("^Hello")) + assert not has_level_method(re.compile("world$")) + assert not has_level_method("^Hello World") def 
test_test_handler_cache(active_handler, logger): - logger.warn('First line') + logger.warn("First line") assert len(active_handler.formatted_records) == 1 # store cache, to make sure it is identifiable cache = active_handler.formatted_records assert len(active_handler.formatted_records) == 1 assert cache is active_handler.formatted_records - logger.warn('Second line invalidates cache') + logger.warn("Second line invalidates cache") assert len(active_handler.formatted_records) == 2 - assert (cache is not active_handler.formatted_records) + assert cache is not active_handler.formatted_records diff --git a/tests/test_ticketing.py b/tests/test_ticketing.py index 209065f..276ac12 100644 --- a/tests/test_ticketing.py +++ b/tests/test_ticketing.py @@ -21,18 +21,20 @@ @pytest.mark.xfail( - os.name == 'nt' and (python_version == (3, 2) or python_version == (3, 3)), - reason='Problem with in-memory sqlite on Python 3.2, 3.3 and Windows') -@require_module('sqlalchemy') + os.name == "nt" and (python_version == (3, 2) or python_version == (3, 3)), + reason="Problem with in-memory sqlite on Python 3.2, 3.3 and Windows", +) +@require_module("sqlalchemy") def test_basic_ticketing(logger): from time import sleep from logbook.ticketing import TicketingHandler - with TicketingHandler('sqlite:///') as handler: + + with TicketingHandler("sqlite:///") as handler: for x in xrange(5): - logger.warn('A warning') + logger.warn("A warning") sleep(0.2) - logger.info('An error') + logger.info("An error") sleep(0.2) if x < 2: try: @@ -58,15 +60,14 @@ def test_basic_ticketing(logger): ticket = handler.db.get_ticket(tickets[1].ticket_id) assert ticket == tickets[1] - occurrences = handler.db.get_occurrences(tickets[2].ticket_id, - order_by='time') + occurrences = handler.db.get_occurrences(tickets[2].ticket_id, order_by="time") assert len(occurrences) == 2 record = occurrences[0] assert __file_without_pyc__ in record.filename # avoid 2to3 destroying our assertion - assert getattr(record, 
'func_name') == 'test_basic_ticketing' + assert getattr(record, "func_name") == "test_basic_ticketing" assert record.level == logbook.ERROR assert record.thread == get_ident() assert record.process == os.getpid() - assert record.channel == 'testlogger' - assert '1 / 0' in record.formatted_exception + assert record.channel == "testlogger" + assert "1 / 0" in record.formatted_exception diff --git a/tests/test_unicode.py b/tests/test_unicode.py index c716e3a..a274685 100644 --- a/tests/test_unicode.py +++ b/tests/test_unicode.py @@ -6,16 +6,16 @@ @require_py3 def test_default_format_unicode(logger): with capturing_stderr_context() as stream: - logger.warn('\u2603') - assert 'WARNING: testlogger: \u2603' in stream.getvalue() + logger.warn("\u2603") + assert "WARNING: testlogger: \u2603" in stream.getvalue() @require_py3 def test_default_format_encoded(logger): with capturing_stderr_context() as stream: # it's a string but it's in the right encoding so don't barf - logger.warn('\u2603') - assert 'WARNING: testlogger: \u2603' in stream.getvalue() + logger.warn("\u2603") + assert "WARNING: testlogger: \u2603" in stream.getvalue() @require_py3 @@ -23,29 +23,27 @@ def test_default_format_bad_encoding(logger): with capturing_stderr_context() as stream: # it's a string, is wrong, but just dump it in the logger, # don't try to decode/encode it - logger.warn('Š ŃƒŃŃŠŗŠøŠ¹'.encode('koi8-r')) + logger.warn("Š ŃƒŃŃŠŗŠøŠ¹".encode("koi8-r")) expected = "WARNING: testlogger: b'\\xf2\\xd5\\xd3\\xd3\\xcb\\xc9\\xca'" assert expected in stream.getvalue() @require_py3 def test_custom_unicode_format_unicode(logger): - format_string = ('[{record.level_name}] ' - '{record.channel}: {record.message}') + format_string = "[{record.level_name}] " "{record.channel}: {record.message}" with capturing_stderr_context() as stream: with logbook.StderrHandler(format_string=format_string): logger.warn("\u2603") - assert '[WARNING] testlogger: \u2603' in stream.getvalue() + assert "[WARNING] 
testlogger: \u2603" in stream.getvalue() @require_py3 def test_custom_string_format_unicode(logger): - format_string = ('[{record.level_name}] ' - '{record.channel}: {record.message}') + format_string = "[{record.level_name}] " "{record.channel}: {record.message}" with capturing_stderr_context() as stream: with logbook.StderrHandler(format_string=format_string): - logger.warn('\u2603') - assert '[WARNING] testlogger: \u2603' in stream.getvalue() + logger.warn("\u2603") + assert "[WARNING] testlogger: \u2603" in stream.getvalue() @require_py3 @@ -58,5 +56,5 @@ def test_unicode_message_encoded_params(logger): @require_py3 def test_encoded_message_unicode_params(logger): with capturing_stderr_context() as stream: - logger.warn('\u2603 {0}'.encode(), '\u2603') - assert 'WARNING: testlogger: \u2603 \u2603' in stream.getvalue() + logger.warn("\u2603 {0}".encode(), "\u2603") + assert "WARNING: testlogger: \u2603 \u2603" in stream.getvalue() diff --git a/tests/test_utils.py b/tests/test_utils.py index 9a4128f..b1f21d6 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -21,16 +21,16 @@ def test_logged_if_slow_reached(test_handler): with test_handler.applicationbound(): - with logged_if_slow('checking...', threshold=_THRESHOLD): + with logged_if_slow("checking...", threshold=_THRESHOLD): sleep(2 * _THRESHOLD) assert len(test_handler.records) == 1 [record] = test_handler.records - assert record.message == 'checking...' + assert record.message == "checking..." 
def test_logged_if_slow_did_not_reached(test_handler): with test_handler.applicationbound(): - with logged_if_slow('checking...', threshold=_THRESHOLD): + with logged_if_slow("checking...", threshold=_THRESHOLD): sleep(_THRESHOLD / 2) assert len(test_handler.records) == 0 @@ -38,16 +38,15 @@ def test_logged_if_slow_did_not_reached(test_handler): def test_logged_if_slow_logger(): logger = Mock() - with logged_if_slow('checking...', threshold=_THRESHOLD, logger=logger): + with logged_if_slow("checking...", threshold=_THRESHOLD, logger=logger): sleep(2 * _THRESHOLD) - assert logger.log.call_args == call(logbook.DEBUG, 'checking...') + assert logger.log.call_args == call(logbook.DEBUG, "checking...") def test_logged_if_slow_level(test_handler): with test_handler.applicationbound(): - with logged_if_slow('checking...', threshold=_THRESHOLD, - level=logbook.WARNING): + with logged_if_slow("checking...", threshold=_THRESHOLD, level=logbook.WARNING): sleep(2 * _THRESHOLD) assert test_handler.records[0].level == logbook.WARNING @@ -55,15 +54,14 @@ def test_logged_if_slow_level(test_handler): def test_logged_if_slow_deprecated(logger, test_handler): with test_handler.applicationbound(): - with logged_if_slow('checking...', threshold=_THRESHOLD, - func=logbook.error): + with logged_if_slow("checking...", threshold=_THRESHOLD, func=logbook.error): sleep(2 * _THRESHOLD) assert test_handler.records[0].level == logbook.ERROR - assert test_handler.records[0].message == 'checking...' + assert test_handler.records[0].message == "checking..." 
with pytest.raises(TypeError): - logged_if_slow('checking...', logger=logger, func=logger.error) + logged_if_slow("checking...", logger=logger, func=logger.error) def test_deprecated_func_called(capture): @@ -75,11 +73,10 @@ def test_deprecation_message(capture): [record] = capture.records assert "deprecated" in record.message - assert 'deprecated_func' in record.message + assert "deprecated_func" in record.message def test_deprecation_with_message(capture): - @deprecated("use something else instead") def func(a, b): return a + b @@ -92,8 +89,7 @@ def func(a, b): def test_no_deprecations(capture): - - @deprecated('msg') + @deprecated("msg") def func(a, b): return a + b @@ -106,12 +102,10 @@ def _no_decorator(func): return func -@pytest.mark.parametrize('decorator', [_no_decorator, classmethod]) +@pytest.mark.parametrize("decorator", [_no_decorator, classmethod]) def test_class_deprecation(capture, decorator): - class Bla: - - @deprecated('reason') + @deprecated("reason") @classmethod def func(self, a, b): assert isinstance(self, Bla) @@ -120,11 +114,10 @@ def func(self, a, b): assert Bla().func(2, 4) == 6 [record] = capture.records - assert 'Bla.func is deprecated' in record.message + assert "Bla.func is deprecated" in record.message def test_deprecations_different_sources(capture): - def f(): deprecated_func(1, 2) @@ -137,7 +130,6 @@ def g(): def test_deprecations_same_sources(capture): - def f(): deprecated_func(1, 2) @@ -147,12 +139,11 @@ def f(): def test_deprecation_message_different_sources(capture): - def f(flag): if flag: - log_deprecation_message('first message type') + log_deprecation_message("first message type") else: - log_deprecation_message('second message type') + log_deprecation_message("second message type") f(True) f(False) @@ -160,12 +151,11 @@ def f(flag): def test_deprecation_message_same_sources(capture): - def f(flag): if flag: - log_deprecation_message('first message type') + log_deprecation_message("first message type") else: - 
log_deprecation_message('second message type') + log_deprecation_message("second message type") f(True) f(True) @@ -174,11 +164,12 @@ def f(flag): def test_deprecation_message_full_warning(capture): def f(): - log_deprecation_message('some_message') + log_deprecation_message("some_message") + f() [record] = capture.records - assert record.message == 'Deprecation message: some_message' + assert record.message == "Deprecation message: some_message" def test_name_doc(): @@ -187,36 +178,33 @@ def some_func(): """docstring here""" pass - assert some_func.__name__ == 'some_func' - assert 'docstring here' in some_func.__doc__ + assert some_func.__name__ == "some_func" + assert "docstring here" in some_func.__doc__ def test_doc_update(): - @deprecated('some_message') + @deprecated("some_message") def some_func(): """docstring here""" pass - some_func.__doc__ = 'new_docstring' + some_func.__doc__ = "new_docstring" - assert 'docstring here' not in some_func.__doc__ - assert 'new_docstring' in some_func.__doc__ - assert 'some_message' in some_func.__doc__ + assert "docstring here" not in some_func.__doc__ + assert "new_docstring" in some_func.__doc__ + assert "some_message" in some_func.__doc__ def test_deprecatd_docstring(): - message = "Use something else instead" @deprecated() def some_func(): - """This is a function - """ + """This is a function""" @deprecated(message) def other_func(): - """This is another function - """ + """This is another function""" assert ".. deprecated" in some_func.__doc__ assert f".. 
deprecated\n {message}" in other_func.__doc__ @@ -230,6 +218,7 @@ def capture(request): @request.addfinalizer def pop(): handler.pop_application() + return handler diff --git a/tests/utils.py b/tests/utils.py index b12ceb1..5ee21d3 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -25,17 +25,18 @@ def get_total_delta_seconds(delta): Replacement for datetime.timedelta.total_seconds() for Python 2.5, 2.6 and 3.1 """ - return (delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 10**6) / 10**6 + return ( + delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 10**6 + ) / 10**6 -require_py3 = pytest.mark.skipif( - sys.version_info[0] < 3, reason="Requires Python 3") +require_py3 = pytest.mark.skipif(sys.version_info[0] < 3, reason="Requires Python 3") appveyor = pytest.mark.skipif( - os.environ.get('APPVEYOR') != 'True', reason='AppVeyor CI test') + os.environ.get("APPVEYOR") != "True", reason="AppVeyor CI test" +) -travis = pytest.mark.skipif( - os.environ.get('TRAVIS') != 'true', reason='Travis CI test') +travis = pytest.mark.skipif(os.environ.get("TRAVIS") != "true", reason="Travis CI test") def require_module(module_name): @@ -45,8 +46,7 @@ def require_module(module_name): except ImportError: found = False - return pytest.mark.skipif( - not found, reason=f'Module {module_name} is required') + return pytest.mark.skipif(not found, reason=f"Module {module_name} is required") def make_fake_mail_handler(**kwargs): @@ -62,8 +62,8 @@ def close_connection(self, con): def sendmail(self, fromaddr, recipients, mail): self.mails.append((fromaddr, recipients, mail)) - kwargs.setdefault('level', logbook.ERROR) - return FakeMailHandler('foo@example.com', ['bar@example.com'], **kwargs) + kwargs.setdefault("level", logbook.ERROR) + return FakeMailHandler("foo@example.com", ["bar@example.com"], **kwargs) def missing(name): @@ -79,7 +79,9 @@ def wrapper(*args, **kwargs): del sys.modules[name] else: sys.modules[name] = old + return wrapper + return decorate From 
170396304256b2f7c775e931ffd3e939c5dc9f0a Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 22:06:01 +0200 Subject: [PATCH 14/70] Add pre-commit job to GitHub Actions --- .github/workflows/main.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ad26e56..7903019 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -95,3 +95,10 @@ jobs: - name: "Run tox targets for ${{ matrix.python-version }}" run: "python -m tox -- -k 'not redis'" + + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + - uses: pre-commit/action@v3.0.0 From 9c6b38be8d5adac4e487e6152e7631f6c0a555af Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 22:59:00 +0200 Subject: [PATCH 15/70] Fix import order issue --- logbook/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/logbook/__init__.py b/logbook/__init__.py index 1cc8b56..abccb33 100644 --- a/logbook/__init__.py +++ b/logbook/__init__.py @@ -11,7 +11,6 @@ import os -from . import compat from .base import ( CRITICAL, DEBUG, @@ -58,6 +57,8 @@ create_syshandler, ) +from . 
import compat # isort: skip + # create an anonymous default logger and provide all important # methods of that logger as global functions _default_logger = Logger("Generic") From f30b11114f582a665d94f9387e4e3ee299b6dee0 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 23:16:02 +0200 Subject: [PATCH 16/70] Fix black-formatted implicit string concatenation --- logbook/handlers.py | 10 +++++----- logbook/more.py | 10 +++------- logbook/notifiers.py | 2 +- logbook/queues.py | 10 ++++------ 4 files changed, 13 insertions(+), 19 deletions(-) diff --git a/logbook/handlers.py b/logbook/handlers.py index c276ba2..25a2aab 100644 --- a/logbook/handlers.py +++ b/logbook/handlers.py @@ -787,7 +787,7 @@ def __init__( from brotli import Compressor except ImportError: raise RuntimeError( - "The brotli library is required for " "the BrotliCompressionHandler." + "The brotli library is required for the BrotliCompressionHandler." ) max_window_size = int(math.log(compression_window_size, 2)) @@ -852,7 +852,7 @@ def __init__( self, filename, mode, encoding, level, format_string, delay, filter, bubble ) if os.name == "nt": - raise RuntimeError("MonitoringFileHandler " "does not support Windows") + raise RuntimeError("MonitoringFileHandler does not support Windows") self._query_fd() def _query_fd(self): @@ -938,7 +938,7 @@ def __init__( ) self.max_size = max_size self.backup_count = backup_count - assert backup_count > 0, "at least one backup file has to be " "specified" + assert backup_count > 0, "at least one backup file has to be specified" def should_rollover(self, record, bytes): self.stream.seek(0, 2) @@ -1848,7 +1848,7 @@ def __init__( if os.name != "nt": raise RuntimeError( - "NTLogEventLogHandler requires a Windows " "operating system." + "NTLogEventLogHandler requires a Windows operating system." 
) try: @@ -1856,7 +1856,7 @@ def __init__( import win32evtlogutil except ImportError: raise RuntimeError( - "The pywin32 library is required " "for the NTEventLogHandler." + "The pywin32 library is required for the NTEventLogHandler." ) self.application_name = application_name diff --git a/logbook/more.py b/logbook/more.py index 503a0dc..b3e6648 100644 --- a/logbook/more.py +++ b/logbook/more.py @@ -194,7 +194,7 @@ def __init__( import oauth2 except ImportError: raise RuntimeError( - "The python-oauth2 library is required for " "the TwitterHandler." + "The python-oauth2 library is required for the TwitterHandler." ) self._oauth = oauth2 @@ -267,9 +267,7 @@ def __init__( try: from slacker import Slacker except ImportError: - raise RuntimeError( - "The slacker library is required for " "the SlackHandler." - ) + raise RuntimeError("The slacker library is required for the SlackHandler.") self.channel = channel self.slack = Slacker(api_token) @@ -287,9 +285,7 @@ def __init__(self, template): try: from jinja2 import Template except ImportError: - raise RuntimeError( - "The jinja2 library is required for " "the JinjaFormatter." - ) + raise RuntimeError("The jinja2 library is required for the JinjaFormatter.") self.template = Template(template) def __call__(self, record, handler): diff --git a/logbook/notifiers.py b/logbook/notifiers.py index 52cc656..6c6df4e 100644 --- a/logbook/notifiers.py +++ b/logbook/notifiers.py @@ -179,7 +179,7 @@ def __init__( self._pynotify = pynotify except ImportError: raise RuntimeError( - "The pynotify library is required for " "the LibNotifyHandler." + "The pynotify library is required for the LibNotifyHandler." 
) self.icon = icon diff --git a/logbook/queues.py b/logbook/queues.py index b68cfcc..cd00c29 100644 --- a/logbook/queues.py +++ b/logbook/queues.py @@ -65,7 +65,7 @@ def __init__( import redis from redis import ResponseError except ImportError: - raise RuntimeError("The redis library is required for " "the RedisHandler") + raise RuntimeError("The redis library is required for the RedisHandler") self.redis = redis.Redis( host=host, port=port, password=password, decode_responses=True @@ -174,7 +174,7 @@ def __init__( import kombu except ImportError: raise RuntimeError( - "The kombu library is required for " "the RabbitMQSubscriber." + "The kombu library is required for the RabbitMQSubscriber." ) if uri: connection = kombu.Connection(uri) @@ -226,9 +226,7 @@ def __init__( try: import zmq except ImportError: - raise RuntimeError( - "The pyzmq library is required for " "the ZeroMQHandler." - ) + raise RuntimeError("The pyzmq library is required for the ZeroMQHandler.") #: the zero mq context self.context = context or zmq.Context() @@ -452,7 +450,7 @@ def __init__(self, uri=None, context=None, multi=False): import zmq except ImportError: raise RuntimeError( - "The pyzmq library is required for " "the ZeroMQSubscriber." + "The pyzmq library is required for the ZeroMQSubscriber." 
) self._zmq = zmq From 2719c9e982d5380360d76804d2d67b86308268ff Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 22:24:19 +0200 Subject: [PATCH 17/70] Remove AppVeyor and Travis config files --- .appveyor/after_test.bat | 6 ---- .appveyor/build.cmd | 21 ----------- .appveyor/prepare.bat | 24 ------------- .travis.yml | 65 --------------------------------- appveyor.yml | 77 ---------------------------------------- 5 files changed, 193 deletions(-) delete mode 100644 .appveyor/after_test.bat delete mode 100644 .appveyor/build.cmd delete mode 100644 .appveyor/prepare.bat delete mode 100644 .travis.yml delete mode 100644 appveyor.yml diff --git a/.appveyor/after_test.bat b/.appveyor/after_test.bat deleted file mode 100644 index cf8a22b..0000000 --- a/.appveyor/after_test.bat +++ /dev/null @@ -1,6 +0,0 @@ -IF DEFINED CYBUILD ( - %BUILD% python setup.py bdist_wheel - IF "%APPVEYOR_REPO_TAG%"=="true" ( - twine upload -u %PYPI_USERNAME% -p %PYPI_PASSWORD% dist\*.whl - ) -) diff --git a/.appveyor/build.cmd b/.appveyor/build.cmd deleted file mode 100644 index 243dc9a..0000000 --- a/.appveyor/build.cmd +++ /dev/null @@ -1,21 +0,0 @@ -@echo off -:: To build extensions for 64 bit Python 3, we need to configure environment -:: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of: -:: MS Windows SDK for Windows 7 and .NET Framework 4 -:: -:: More details at: -:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows - -IF "%DISTUTILS_USE_SDK%"=="1" ( - ECHO Configuring environment to build with MSVC on a 64bit architecture - ECHO Using Windows SDK 7.1 - "C:\Program Files\Microsoft SDKs\Windows\v7.1\Setup\WindowsSdkVer.exe" -q -version:v7.1 - CALL "C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd" /x64 /release - SET MSSdk=1 - REM Need the following to allow tox to see the SDK compiler - SET TOX_TESTENV_PASSENV=DISTUTILS_USE_SDK MSSdk INCLUDE LIB -) ELSE ( - ECHO Using default MSVC build environment -) - -CALL %* diff 
--git a/.appveyor/prepare.bat b/.appveyor/prepare.bat deleted file mode 100644 index 1fcec18..0000000 --- a/.appveyor/prepare.bat +++ /dev/null @@ -1,24 +0,0 @@ -pip install -U wheel setuptools || goto :error -nuget install redis-64 -excludeversion || goto :error -redis-64\tools\redis-server.exe --service-install || goto :error -redis-64\tools\redis-server.exe --service-start || goto :error -IF NOT DEFINED SKIPZMQ ( - nuget install ZeroMQ || goto :error -) -IF DEFINED CYBUILD ( - %BUILD% pip install cython twine || goto :error - cython logbook\_speedups.pyx || goto :error -) ELSE ( - set DISABLE_LOGBOOK_CEXT=True -) -IF DEFINED SKIPZMQ ( - %BUILD% pip install -e .[dev,execnet,jinja,sqlalchemy,redis] || goto :error -) ELSE ( - %BUILD% pip install -e .[all] || goto :error -) -REM pypiwin32 can fail, ignore error. -%BUILD% pip install pypiwin32 -exit /b 0 - -:error -exit /b %errorlevel% diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 3ee1867..0000000 --- a/.travis.yml +++ /dev/null @@ -1,65 +0,0 @@ -language: python -dist: xenial -addons: - apt: - sources: - - chris-lea-redis-server - - sourceline: "ppa:chris-lea/zeromq" - packages: - - redis-server - - libzmq3-dev -services: - - redis-server -python: - - "2.7" - - "3.5" - - "3.6" - - "3.7" -before_install: - - pip install coveralls -install: - - pip install -U pip - - pip install cython - - cython logbook/_speedups.pyx -env: - - DISABLE_LOGBOOK_CEXT=True - - CYBUILD=True - -script: - - pip install -e .[all] - - if [[ $GEVENT == 'True' ]] ; then pip install gevent; fi - - pytest --cov=logbook -r s tests - -matrix: - exclude: - include: - - python: "3.6" - env: GEVENT=True CYBUILD=True - - python: "2.7" - env: GEVENT=True CYBUILD=True - -after_success: - - coveralls - -notifications: - email: - recipients: - - vmalloc@gmail.com - irc: - channels: - - chat.freenode.net#pocoo - on_success: change - on_failure: always - use_notice: true - skip_join: true -deploy: - - provider: pypi - user: vmalloc - 
password: - secure: WFmuAbtBDIkeZArIFQRCwyO1TdvF2PaZpo75r3mFgnY+aWm75cdgjZKoNqVprF/f+v9EsX2kDdQ7ZfuhMLgP8MNziB+ty7579ZDGwh64jGoi+DIoeblAFu5xNAqjvhie540uCE8KySk9s+Pq5EpOA5w18V4zxTw+h6tnBQ0M9cQ= - on: - python: "3.7" - condition: $CYBUILD = 'True' - tags: true - repo: getlogbook/logbook - distributions: "sdist" diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index ff821af..0000000 --- a/appveyor.yml +++ /dev/null @@ -1,77 +0,0 @@ -cache: - - C:\Users\appveyor\AppData\Local\pip\Cache\wheels - -environment: - global: - # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the - # /E:ON and /V:ON options are not enabled in the batch script intepreter - # See: http://stackoverflow.com/a/13751649/163740 - BUILD: "cmd /E:ON /V:ON /C .\\.appveyor\\build.cmd" - PYPI_USERNAME: - secure: ixvjwUN/HsSfGkU3OvtQ8Q== - PYPI_PASSWORD: - secure: KOr+oEHZJmo1el3bT+ivmQ== - ENABLE_LOGBOOK_NTEVENTLOG_TESTS: "TRUE" - - matrix: - - PYTHON: "C:\\Python27" - - PYTHON: "C:\\Python27" - CYBUILD: "TRUE" - - - PYTHON: "C:\\Python27-x64" - - PYTHON: "C:\\Python27-x64" - CYBUILD: "TRUE" - - - PYTHON: "C:\\Python35" - - PYTHON: "C:\\Python35" - CYBUILD: "TRUE" - - - PYTHON: "C:\\Python35-x64" - - PYTHON: "C:\\Python35-x64" - CYBUILD: "TRUE" - - - PYTHON: "C:\\Python36" - - PYTHON: "C:\\Python36" - CYBUILD: "TRUE" - - - PYTHON: "C:\\Python36-x64" - - PYTHON: "C:\\Python36-x64" - CYBUILD: "TRUE" - - - PYTHON: "C:\\Python37" - - PYTHON: "C:\\Python37" - CYBUILD: "TRUE" - - - PYTHON: "C:\\Python37-x64" - - PYTHON: "C:\\Python37-x64" - CYBUILD: "TRUE" - -init: - - echo %PYTHON% - - set PATH=%PYTHON%;%PYTHON%\Scripts;%PATH% - -install: - - ".appveyor\\prepare.bat" - -build: off - -test_script: - - py.test -r s tests - -after_test: - - ".appveyor\\after_test.bat" - -artifacts: - # Archive the generated packages in the ci.appveyor.com build report. 
- - path: dist\*.whl - -deploy: - description: "" - provider: GitHub - auth_token: - secure: 0yLUo/V+wwSvSFk9nBW/77RN9iTjJA1B5p/TM1XgVLPPFEZWkH756jyJ0FOmtJPt - artifact: /.*\.whl/ - draft: true - prerelease: false - on: - appveyor_repo_tag: true From 0a7008d1d666e3c52de163ac8a588ef23b405233 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 22:25:00 +0200 Subject: [PATCH 18/70] Enable NTEventLogHandler tests on GitHub Actions --- .github/workflows/main.yml | 3 +++ tox.ini | 1 + 2 files changed, 4 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7903019..8b0fbab 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -55,6 +55,9 @@ jobs: python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] arch: ["x86", "x64"] + env: + ENABLE_LOGBOOK_NTEVENTLOG_TESTS: "1" + steps: - uses: "actions/checkout@v3" - uses: "actions/setup-python@v4" diff --git a/tox.ini b/tox.ini index 58b2d3c..74fafc1 100644 --- a/tox.ini +++ b/tox.ini @@ -10,6 +10,7 @@ set_env = pass_env = REDIS_HOST REDIS_PORT + ENABLE_LOGBOOK_NTEVENTLOG_TESTS commands = pytest {posargs} From 47c12ee0413bf1cbfb6b32aaf7f0e4a3cd8f1fca Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 25 Jun 2023 23:26:40 +0200 Subject: [PATCH 19/70] Add nteventlog extra for pywin32 --- pyproject.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 866c464..869f2be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,9 @@ redis = ["redis"] zmq = ["pyzmq"] jinja = ["Jinja2"] compression = ["brotli"] -all = ["Logbook[execnet,sqlalchemy,redis,zmq,jinja,compression]"] +all = ["Logbook[execnet,sqlalchemy,redis,zmq,jinja,compression,nteventlog]"] +nteventlog = ["pywin32; platform_system == 'Windows'"] + [tool.pytest.ini_options] testpaths = ["tests"] From 301c50838f181f6e11563e980fa60df2e644c263 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Mon, 26 Jun 2023 00:14:12 +0200 Subject: [PATCH 
20/70] Support SQLAlchemy 1.4/2.0 --- logbook/ticketing.py | 102 +++++++++++++++++++++++-------------------- 1 file changed, 55 insertions(+), 47 deletions(-) diff --git a/logbook/ticketing.py b/logbook/ticketing.py index ed63c3b..3757434 100644 --- a/logbook/ticketing.py +++ b/logbook/ticketing.py @@ -23,7 +23,7 @@ class Ticket: def __init__(self, db, row): self.db = db - self.__dict__.update(row) + self.__dict__.update(row._mapping) @cached_property def last_occurrence(self): @@ -64,11 +64,11 @@ class Occurrence(LogRecord): """Represents an occurrence of a ticket.""" def __init__(self, db, row): - self.update_from_dict(json.loads(row["data"])) + self.update_from_dict(json.loads(row.data)) self.db = db - self.time = row["time"] - self.ticket_id = row["ticket_id"] - self.occurrence_id = row["occurrence_id"] + self.time = row.time + self.ticket_id = row.ticket_id + self.occurrence_id = row.occurrence_id class BackendBase: @@ -144,9 +144,7 @@ def setup_backend(self): # Pool recycle keeps connections from going stale, # which happens in MySQL Databases # Pool size is more custom for out stack - self.engine = create_engine( - engine_or_uri, convert_unicode=True, pool_recycle=360, pool_size=1000 - ) + self.engine = create_engine(engine_or_uri, pool_recycle=360, pool_size=1000) # Create session factory using session maker session = sessionmaker() @@ -211,8 +209,8 @@ def record_ticket(self, record, data, hash, app_id): # Can use the session instead engine.connection and transaction s = self.session try: - q = self.tickets.select(self.tickets.c.record_hash == hash) - row = s.execute(q).fetchone() + q = self.tickets.select().where(self.tickets.c.record_hash == hash) + row = s.execute(q).one_or_none() if row is None: row = s.execute( self.tickets.insert().values( @@ -228,7 +226,7 @@ def record_ticket(self, record, data, hash, app_id): ) ticket_id = row.inserted_primary_key[0] else: - ticket_id = row["ticket_id"] + ticket_id = row.ticket_id s.execute( 
self.occurrences.insert().values( ticket_id=ticket_id, @@ -255,60 +253,70 @@ def record_ticket(self, record, data, hash, app_id): def count_tickets(self): """Returns the number of tickets.""" - return self.engine.execute(self.tickets.count()).fetchone()[0] + from sqlalchemy import func, select + + with self.engine.begin() as conn: + return conn.scalar(select(func.count()).select_from(self.tickets)) def get_tickets(self, order_by="-last_occurrence_time", limit=50, offset=0): """Selects tickets from the database.""" - return [ - Ticket(self, row) - for row in self.engine.execute( - self._order(self.tickets.select(), self.tickets, order_by) - .limit(limit) - .offset(offset) - ).fetchall() - ] + with self.engine.begin() as conn: + return [ + Ticket(self, row) + for row in conn.execute( + self._order(self.tickets.select(), self.tickets, order_by) + .limit(limit) + .offset(offset) + ) + ] def solve_ticket(self, ticket_id): """Marks a ticket as solved.""" - self.engine.execute( - self.tickets.update() - .where(self.tickets.c.ticket_id == ticket_id) - .values(solved=True) - ) + with self.engine.begin() as conn: + conn.execute( + self.tickets.update() + .where(self.tickets.c.ticket_id == ticket_id) + .values(solved=True) + ) def delete_ticket(self, ticket_id): """Deletes a ticket from the database.""" - self.engine.execute( - self.occurrences.delete().where(self.occurrences.c.ticket_id == ticket_id) - ) - self.engine.execute( - self.tickets.delete().where(self.tickets.c.ticket_id == ticket_id) - ) + with self.engine.begin() as conn: + conn.execute( + self.occurrences.delete().where( + self.occurrences.c.ticket_id == ticket_id + ) + ) + conn.execute( + self.tickets.delete().where(self.tickets.c.ticket_id == ticket_id) + ) def get_ticket(self, ticket_id): """Return a single ticket with all occurrences.""" - row = self.engine.execute( - self.tickets.select().where(self.tickets.c.ticket_id == ticket_id) - ).fetchone() + with self.engine.begin() as conn: + row = conn.execute( + 
self.tickets.select().where(self.tickets.c.ticket_id == ticket_id) + ).one_or_none() if row is not None: return Ticket(self, row) def get_occurrences(self, ticket, order_by="-time", limit=50, offset=0): """Selects occurrences from the database for a ticket.""" - return [ - Occurrence(self, row) - for row in self.engine.execute( - self._order( - self.occurrences.select().where( - self.occurrences.c.ticket_id == ticket - ), - self.occurrences, - order_by, + with self.engine.begin() as conn: + return [ + Occurrence(self, row) + for row in conn.execute( + self._order( + self.occurrences.select().where( + self.occurrences.c.ticket_id == ticket + ), + self.occurrences, + order_by, + ) + .limit(limit) + .offset(offset) ) - .limit(limit) - .offset(offset) - ).fetchall() - ] + ] class MongoDBBackend(BackendBase): From f30a4aca8f882545457a230fabc07e9a1bb70cdf Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Mon, 26 Jun 2023 00:38:07 +0200 Subject: [PATCH 21/70] Rerun racy logged_if_slow tests --- .github/workflows/main.yml | 6 +++--- pyproject.toml | 2 +- tests/test_utils.py | 5 +++++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 8b0fbab..a8be302 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -43,7 +43,7 @@ jobs: python -m pip install --upgrade tox tox-gh-actions - name: "Run tox targets for ${{ matrix.python-version }}" - run: "python -m tox" + run: "python -m tox -- -r aR" windows: name: "Windows (${{ matrix.python-version }}, ${{ matrix.arch }})" @@ -71,7 +71,7 @@ jobs: - run: python -m pip install --upgrade tox tox-gh-actions - name: "Run tox targets for ${{ matrix.python-version }} on ${{ matrix.arch }}" - run: "python -m tox -- -k 'not redis'" + run: "python -m tox -- -r aR -k 'not redis'" macos: name: "macOS (${{ matrix.python-version }})" @@ -97,7 +97,7 @@ jobs: python -m pip install --upgrade tox tox-gh-actions - name: "Run tox targets for ${{ 
matrix.python-version }}" - run: "python -m tox -- -k 'not redis'" + run: "python -m tox -- -r aR -k 'not redis'" pre-commit: runs-on: ubuntu-latest diff --git a/pyproject.toml b/pyproject.toml index 869f2be..a8a6773 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ Documentation = "https://logbook.readthedocs.io" "Source Code" = "https://github.com/getlogbook/logbook" [project.optional-dependencies] -test = ["pytest>=6"] +test = ["pytest>=6", "pytest-rerunfailures"] dev = ["Logbook[test]", "tox>=4"] execnet = ["execnet>=1.0.9"] sqlalchemy = ["sqlalchemy"] diff --git a/tests/test_utils.py b/tests/test_utils.py index b1f21d6..53fe519 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -19,6 +19,7 @@ from unittest.mock import Mock, call +@pytest.mark.flaky(reruns=5) def test_logged_if_slow_reached(test_handler): with test_handler.applicationbound(): with logged_if_slow("checking...", threshold=_THRESHOLD): @@ -28,6 +29,7 @@ def test_logged_if_slow_reached(test_handler): assert record.message == "checking..." 
+@pytest.mark.flaky(reruns=5) def test_logged_if_slow_did_not_reached(test_handler): with test_handler.applicationbound(): with logged_if_slow("checking...", threshold=_THRESHOLD): @@ -35,6 +37,7 @@ def test_logged_if_slow_did_not_reached(test_handler): assert len(test_handler.records) == 0 +@pytest.mark.flaky(reruns=5) def test_logged_if_slow_logger(): logger = Mock() @@ -44,6 +47,7 @@ def test_logged_if_slow_logger(): assert logger.log.call_args == call(logbook.DEBUG, "checking...") +@pytest.mark.flaky(reruns=5) def test_logged_if_slow_level(test_handler): with test_handler.applicationbound(): with logged_if_slow("checking...", threshold=_THRESHOLD, level=logbook.WARNING): @@ -52,6 +56,7 @@ def test_logged_if_slow_level(test_handler): assert test_handler.records[0].level == logbook.WARNING +@pytest.mark.flaky(reruns=5) def test_logged_if_slow_deprecated(logger, test_handler): with test_handler.applicationbound(): with logged_if_slow("checking...", threshold=_THRESHOLD, func=logbook.error): From dd5b2591d0f35de922850f5a9ca7fcf43fce682d Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Mon, 26 Jun 2023 01:03:40 +0200 Subject: [PATCH 22/70] Remove unused test file --- tests/test_ci.py | 23 ----------------------- 1 file changed, 23 deletions(-) delete mode 100644 tests/test_ci.py diff --git a/tests/test_ci.py b/tests/test_ci.py deleted file mode 100644 index 087685d..0000000 --- a/tests/test_ci.py +++ /dev/null @@ -1,23 +0,0 @@ -import os - -import pytest - -from .utils import appveyor, travis - - -@appveyor -def test_appveyor_speedups(): - if os.environ.get("CYBUILD"): - import logbook._speedups - else: - with pytest.raises(ImportError): - import logbook._speedups - - -@travis -def test_travis_speedups(): - if os.environ.get("CYBUILD"): - import logbook._speedups - else: - with pytest.raises(ImportError): - import logbook._speedups From f7d0a42d455ddaace2467cee7c2812049e15fb97 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Mon, 26 Jun 2023 23:54:01 +0200 
Subject: [PATCH 23/70] Drop compatibility for <3.7 --- benchmark/bench_disabled_introspection.py | 2 +- benchmark/bench_disabled_logger.py | 2 +- benchmark/bench_enabled_introspection.py | 2 +- benchmark/bench_file_handler.py | 2 +- benchmark/bench_file_handler_unicode.py | 2 +- benchmark/bench_logger_creation.py | 2 +- benchmark/bench_logger_level_low.py | 2 +- benchmark/bench_logging_file_handler.py | 2 +- .../bench_logging_file_handler_unicode.py | 2 +- benchmark/bench_logging_logger_creation.py | 2 +- benchmark/bench_logging_logger_level_low.py | 2 +- benchmark/bench_logging_noop.py | 2 +- benchmark/bench_logging_noop_filter.py | 2 +- benchmark/bench_logging_stream_handler.py | 2 +- benchmark/bench_noop.py | 2 +- benchmark/bench_noop_filter.py | 2 +- benchmark/bench_noop_filter_on_handler.py | 2 +- benchmark/bench_redirect_from_logging.py | 2 +- benchmark/bench_redirect_to_logging.py | 2 +- benchmark/bench_stack_manipulation.py | 2 +- benchmark/bench_stream_handler.py | 2 +- benchmark/bench_test_handler.py | 2 +- logbook/base.py | 57 ++------ logbook/compat.py | 14 +- logbook/concurrency.py | 70 +++------- logbook/handlers.py | 84 ++++-------- logbook/helpers.py | 127 +----------------- logbook/more.py | 24 ++-- logbook/notifiers.py | 15 +-- logbook/queues.py | 15 +-- logbook/ticketing.py | 23 ++-- logbook/utils.py | 3 +- tests/test_asyncio.py | 4 - tests/test_file_handler.py | 35 +++-- tests/test_helpers.py | 12 +- tests/test_logging_api.py | 9 +- tests/test_logging_compat.py | 2 +- tests/test_mail_handler.py | 7 +- tests/test_more.py | 2 +- tests/test_queues.py | 7 +- tests/test_ticketing.py | 9 +- tests/test_utils.py | 6 +- tests/utils.py | 2 +- 43 files changed, 152 insertions(+), 419 deletions(-) diff --git a/benchmark/bench_disabled_introspection.py b/benchmark/bench_disabled_introspection.py index 4585cef..87d4fe8 100644 --- a/benchmark/bench_disabled_introspection.py +++ b/benchmark/bench_disabled_introspection.py @@ -11,5 +11,5 @@ class 
DummyHandler(NullHandler): def run(): with Flags(introspection=False): with DummyHandler() as handler: - for x in xrange(500): + for x in range(500): log.warning("this is not handled") diff --git a/benchmark/bench_disabled_logger.py b/benchmark/bench_disabled_logger.py index 8644ca2..3572da6 100644 --- a/benchmark/bench_disabled_logger.py +++ b/benchmark/bench_disabled_logger.py @@ -6,5 +6,5 @@ def run(): - for x in xrange(500): + for x in range(500): log.warning("this is not handled") diff --git a/benchmark/bench_enabled_introspection.py b/benchmark/bench_enabled_introspection.py index 3147e80..11013bf 100644 --- a/benchmark/bench_enabled_introspection.py +++ b/benchmark/bench_enabled_introspection.py @@ -11,5 +11,5 @@ class DummyHandler(NullHandler): def run(): with Flags(introspection=True): with DummyHandler() as handler: - for x in xrange(500): + for x in range(500): log.warning("this is not handled") diff --git a/benchmark/bench_file_handler.py b/benchmark/bench_file_handler.py index ec19095..6b2c0cf 100644 --- a/benchmark/bench_file_handler.py +++ b/benchmark/bench_file_handler.py @@ -9,5 +9,5 @@ def run(): f = NamedTemporaryFile() with FileHandler(f.name) as handler: - for x in xrange(500): + for x in range(500): log.warning("this is handled") diff --git a/benchmark/bench_file_handler_unicode.py b/benchmark/bench_file_handler_unicode.py index 573116c..10059df 100644 --- a/benchmark/bench_file_handler_unicode.py +++ b/benchmark/bench_file_handler_unicode.py @@ -9,5 +9,5 @@ def run(): f = NamedTemporaryFile() with FileHandler(f.name) as handler: - for x in xrange(500): + for x in range(500): log.warning("this is handled \x6f") diff --git a/benchmark/bench_logger_creation.py b/benchmark/bench_logger_creation.py index 9a2a1ad..0019084 100644 --- a/benchmark/bench_logger_creation.py +++ b/benchmark/bench_logger_creation.py @@ -3,5 +3,5 @@ def run(): - for x in xrange(500): + for x in range(500): Logger("Test") diff --git a/benchmark/bench_logger_level_low.py 
b/benchmark/bench_logger_level_low.py index 009f445..53eba81 100644 --- a/benchmark/bench_logger_level_low.py +++ b/benchmark/bench_logger_level_low.py @@ -10,5 +10,5 @@ def run(): out = StringIO() with StreamHandler(out): - for x in xrange(500): + for x in range(500): log.warning("this is not handled") diff --git a/benchmark/bench_logging_file_handler.py b/benchmark/bench_logging_file_handler.py index c18d607..37be718 100644 --- a/benchmark/bench_logging_file_handler.py +++ b/benchmark/bench_logging_file_handler.py @@ -9,5 +9,5 @@ def run(): f = NamedTemporaryFile() handler = FileHandler(f.name) log.addHandler(handler) - for x in xrange(500): + for x in range(500): log.warning("this is handled") diff --git a/benchmark/bench_logging_file_handler_unicode.py b/benchmark/bench_logging_file_handler_unicode.py index de82c95..7ff319f 100644 --- a/benchmark/bench_logging_file_handler_unicode.py +++ b/benchmark/bench_logging_file_handler_unicode.py @@ -9,5 +9,5 @@ def run(): f = NamedTemporaryFile() handler = FileHandler(f.name) log.addHandler(handler) - for x in xrange(500): + for x in range(500): log.warning("this is handled \x6f") diff --git a/benchmark/bench_logging_logger_creation.py b/benchmark/bench_logging_logger_creation.py index 246ecc4..3004f29 100644 --- a/benchmark/bench_logging_logger_creation.py +++ b/benchmark/bench_logging_logger_creation.py @@ -5,6 +5,6 @@ def run(): - for x in xrange(500): + for x in range(500): getLogger("Test") del root_logger.manager.loggerDict["Test"] diff --git a/benchmark/bench_logging_logger_level_low.py b/benchmark/bench_logging_logger_level_low.py index 35286c5..4005d1d 100644 --- a/benchmark/bench_logging_logger_level_low.py +++ b/benchmark/bench_logging_logger_level_low.py @@ -11,5 +11,5 @@ def run(): out = StringIO() handler = StreamHandler(out) log.addHandler(handler) - for x in xrange(500): + for x in range(500): log.warning("this is not handled") diff --git a/benchmark/bench_logging_noop.py 
b/benchmark/bench_logging_noop.py index 64cf527..35a9716 100644 --- a/benchmark/bench_logging_noop.py +++ b/benchmark/bench_logging_noop.py @@ -11,5 +11,5 @@ def run(): handler = StreamHandler(out) handler.setLevel(ERROR) log.addHandler(handler) - for x in xrange(500): + for x in range(500): log.warning("this is not handled") diff --git a/benchmark/bench_logging_noop_filter.py b/benchmark/bench_logging_noop_filter.py index 24ce4ba..5fb3eb9 100644 --- a/benchmark/bench_logging_noop_filter.py +++ b/benchmark/bench_logging_noop_filter.py @@ -16,5 +16,5 @@ def run(): handler = StreamHandler(out) handler.addFilter(DisableFilter()) log.addHandler(handler) - for x in xrange(500): + for x in range(500): log.warning("this is not handled") diff --git a/benchmark/bench_logging_stream_handler.py b/benchmark/bench_logging_stream_handler.py index bacaaa8..ede4037 100644 --- a/benchmark/bench_logging_stream_handler.py +++ b/benchmark/bench_logging_stream_handler.py @@ -9,6 +9,6 @@ def run(): out = StringIO() log.addHandler(StreamHandler(out)) - for x in xrange(500): + for x in range(500): log.warning("this is not handled") assert out.getvalue().count("\n") == 500 diff --git a/benchmark/bench_noop.py b/benchmark/bench_noop.py index 6db4955..1b4291e 100644 --- a/benchmark/bench_noop.py +++ b/benchmark/bench_noop.py @@ -10,6 +10,6 @@ def run(): out = StringIO() with NullHandler(): with StreamHandler(out, level=ERROR) as handler: - for x in xrange(500): + for x in range(500): log.warning("this is not handled") assert not out.getvalue() diff --git a/benchmark/bench_noop_filter.py b/benchmark/bench_noop_filter.py index 99005dc..264dcb6 100644 --- a/benchmark/bench_noop_filter.py +++ b/benchmark/bench_noop_filter.py @@ -9,6 +9,6 @@ def run(): out = StringIO() with NullHandler(): with StreamHandler(out, filter=lambda r, h: False) as handler: - for x in xrange(500): + for x in range(500): log.warning("this is not handled") assert not out.getvalue() diff --git 
a/benchmark/bench_noop_filter_on_handler.py b/benchmark/bench_noop_filter_on_handler.py index becfbaa..8b43b7c 100644 --- a/benchmark/bench_noop_filter_on_handler.py +++ b/benchmark/bench_noop_filter_on_handler.py @@ -15,6 +15,6 @@ def run(): out = StringIO() with NullHandler(): with CustomStreamHandler(out) as handler: - for x in xrange(500): + for x in range(500): log.warning("this is not handled") assert not out.getvalue() diff --git a/benchmark/bench_redirect_from_logging.py b/benchmark/bench_redirect_from_logging.py index 2957f7a..4f7f4ac 100644 --- a/benchmark/bench_redirect_from_logging.py +++ b/benchmark/bench_redirect_from_logging.py @@ -13,6 +13,6 @@ def run(): out = StringIO() with StreamHandler(out): - for x in xrange(500): + for x in range(500): log.warning("this is not handled") assert out.getvalue().count("\n") == 500 diff --git a/benchmark/bench_redirect_to_logging.py b/benchmark/bench_redirect_to_logging.py index 04d5852..b7f5a85 100644 --- a/benchmark/bench_redirect_to_logging.py +++ b/benchmark/bench_redirect_to_logging.py @@ -12,6 +12,6 @@ def run(): out = StringIO() log.addHandler(StreamHandler(out)) with LoggingHandler(): - for x in xrange(500): + for x in range(500): log.warning("this is not handled") assert out.getvalue().count("\n") == 500 diff --git a/benchmark/bench_stack_manipulation.py b/benchmark/bench_stack_manipulation.py index cf2a7db..f923a0a 100644 --- a/benchmark/bench_stack_manipulation.py +++ b/benchmark/bench_stack_manipulation.py @@ -12,5 +12,5 @@ def run(): with NullHandler(): with StreamHandler(out, level=WARNING): with FileHandler(f.name, level=ERROR): - for x in xrange(100): + for x in range(100): list(Handler.stack_manager.iter_context_objects()) diff --git a/benchmark/bench_stream_handler.py b/benchmark/bench_stream_handler.py index a033612..a82b8f0 100644 --- a/benchmark/bench_stream_handler.py +++ b/benchmark/bench_stream_handler.py @@ -9,6 +9,6 @@ def run(): out = StringIO() with StreamHandler(out) as handler: - for 
x in xrange(500): + for x in range(500): log.warning("this is not handled") assert out.getvalue().count("\n") == 500 diff --git a/benchmark/bench_test_handler.py b/benchmark/bench_test_handler.py index bd4d8e8..aa17dc2 100644 --- a/benchmark/bench_test_handler.py +++ b/benchmark/bench_test_handler.py @@ -6,5 +6,5 @@ def run(): with TestHandler() as handler: - for x in xrange(500): + for x in range(500): log.warning("this is not handled") diff --git a/logbook/base.py b/logbook/base.py index 813b961..38d111b 100644 --- a/logbook/base.py +++ b/logbook/base.py @@ -16,17 +16,7 @@ from weakref import ref as weakref from logbook.concurrency import greenlet_get_ident, thread_get_ident, thread_get_name -from logbook.helpers import ( - PY2, - cached_property, - integer_types, - iteritems, - parse_iso8601, - string_types, - to_safe_json, - u, - xrange, -) +from logbook.helpers import cached_property, parse_iso8601, to_safe_json _has_speedups = False try: @@ -143,25 +133,10 @@ def utc_tz(): TRACE: "TRACE", NOTSET: "NOTSET", } -_reverse_level_names = {v: k for (k, v) in iteritems(_level_names)} +_reverse_level_names = {v: k for (k, v) in _level_names.items()} _missing = object() -# on python 3 we can savely assume that frame filenames will be in -# unicode, on Python 2 we have to apply a trick. -if PY2: - - def _convert_frame_filename(fn): - if isinstance(fn, unicode): - fn = fn.decode(sys.getfilesystemencoding() or "utf-8", "replace") - return fn - -else: - - def _convert_frame_filename(fn): - return fn - - def level_name_property(): """Returns a property that reflects the level as name from the internal level attribute. 
@@ -178,7 +153,7 @@ def _set_level_name(self, level): def lookup_level(level): """Return the integer representation of a logging level.""" - if isinstance(level, integer_types): + if isinstance(level, int): return level try: return _reverse_level_names[level] @@ -548,7 +523,7 @@ def to_dict(self, json_safe=False): """ self.pull_information() rv = {} - for key, value in iteritems(self.__dict__): + for key, value in self.__dict__.items(): if key[:1] != "_" and key not in self._noned_on_close: rv[key] = value # the extra dict is exported as regular dict @@ -575,7 +550,7 @@ def update_from_dict(self, d): setattr(self, key, None) self._information_pulled = True self._channel = None - if isinstance(self.time, string_types): + if isinstance(self.time, str): self.time = parse_iso8601(self.time) # TODO: Replace the lambda with str when we remove support for python 2` @@ -603,8 +578,8 @@ def message(self): except (UnicodeEncodeError, AttributeError): # we catch AttributeError since if msg is bytes, # it won't have the 'format' method - if sys.exc_info()[0] is AttributeError and ( - PY2 or not isinstance(self.msg, bytes) + if sys.exc_info()[0] is AttributeError and not isinstance( + self.msg, bytes ): # this is not the case we thought it is... 
raise @@ -634,8 +609,6 @@ def message(self): file=self.filename, lineno=self.lineno, ) - if PY2: - errormsg = errormsg.encode("utf-8") raise TypeError(errormsg) level_name = level_name_property() @@ -650,7 +623,7 @@ def calling_frame(self): while frm is not None and frm.f_globals is globs: frm = frm.f_back - for _ in xrange(self.frame_correction): + for _ in range(self.frame_correction): if frm is None: break @@ -688,7 +661,7 @@ def filename(self): fn = cf.f_code.co_filename if fn[:1] == "<" and fn[-1:] == ">": return fn - return _convert_frame_filename(os.path.abspath(fn)) + return os.path.abspath(fn) @cached_property def lineno(self): @@ -744,8 +717,6 @@ def formatted_exception(self): """ if self.exc_info is not None and self.exc_info != (None, None, None): rv = "".join(traceback.format_exception(*self.exc_info)) - if PY2: - rv = rv.decode("utf-8", "replace") return rv.rstrip() @cached_property @@ -753,7 +724,7 @@ def exception_name(self): """The name of the exception.""" if self.exc_info is not None: cls = self.exc_info[0] - return u(cls.__module__ + "." + cls.__name__) + return cls.__module__ + "." + cls.__name__ @property def exception_shortname(self): @@ -765,13 +736,7 @@ def exception_message(self): """The message of the exception.""" if self.exc_info is not None: val = self.exc_info[1] - try: - if PY2: - return unicode(val) - else: - return str(val) - except UnicodeError: - return str(val).decode("utf-8", "replace") + return str(val) @property def dispatcher(self): diff --git a/logbook/compat.py b/logbook/compat.py index 5c08c1a..079c2ef 100644 --- a/logbook/compat.py +++ b/logbook/compat.py @@ -8,14 +8,13 @@ :copyright: (c) 2010 by Armin Ronacher, Georg Brandl. :license: BSD, see LICENSE for more details. 
""" -import collections import logging import sys import warnings +from collections.abc import Mapping from datetime import date, datetime import logbook -from logbook.helpers import collections_abc, iteritems, string_types, u _epoch_ord = date(1970, 1, 1).toordinal() @@ -151,7 +150,7 @@ def convert_record(self, old_record): kwargs = None # Logging allows passing a mapping object, in which case args will be a mapping. - if isinstance(args, collections_abc.Mapping): + if isinstance(args, Mapping): kwargs = args args = None record = LoggingCompatRecord( @@ -190,7 +189,7 @@ def __init__(self, logger=None, level=logbook.NOTSET, filter=None, bubble=False) logbook.Handler.__init__(self, level, filter, bubble) if logger is None: logger = logging.getLogger() - elif isinstance(logger, string_types): + elif isinstance(logger, str): logger = logging.getLogger(logger) self.logger = logger @@ -238,7 +237,7 @@ def convert_record(self, old_record): old_record.exc_info, **optional_kwargs, ) - for key, value in iteritems(old_record.extra): + for key, value in old_record.extra.items(): record.__dict__.setdefault(key, value) record.created = self.convert_time(old_record.time) return record @@ -281,10 +280,7 @@ def __init__(self): self._entered = False def message_to_unicode(self, message): - try: - return u(str(message)) - except UnicodeError: - return str(message).decode("utf-8", "replace") + return str(message) def make_record(self, message, exception, filename, lineno): category = exception.__name__ diff --git a/logbook/concurrency.py b/logbook/concurrency.py index 7159327..3dc653e 100644 --- a/logbook/concurrency.py +++ b/logbook/concurrency.py @@ -1,3 +1,6 @@ +from contextvars import ContextVar +from itertools import count + has_gevent = True use_gevent = False try: @@ -134,17 +137,12 @@ def _is_owned(self): return self._owner == (thread_get_ident(), greenlet_get_ident()) else: + from _thread import _local as thread_local + from _thread import get_ident as thread_get_ident from 
threading import Lock as ThreadLock from threading import RLock as ThreadRLock from threading import currentThread - try: - from thread import _local as thread_local - from thread import get_ident as thread_get_ident - except ImportError: - from _thread import _local as thread_local - from _thread import get_ident as thread_get_ident - def thread_get_name(): return currentThread().getName() @@ -174,52 +172,22 @@ def new_fine_grained_lock(): return ThreadRLock() -has_contextvars = True -try: - import contextvars -except ImportError: - has_contextvars = False - -if has_contextvars: - from contextvars import ContextVar - from itertools import count - - context_ident_counter = count() - context_ident = ContextVar("context_ident") - - def context_get_ident(): - try: - return context_ident.get() - except LookupError: - ident = "context-%s" % next(context_ident_counter) - context_ident.set(ident) - return ident - - def is_context_enabled(): - try: - context_ident.get() - return True - except LookupError: - return False +context_ident_counter = count() +context_ident = ContextVar("context_ident") -else: - class ContextVar: - def __init__(self, name): - self.name = name - self.local = thread_local() - - def set(self, value): - self.local = value - - def get(self, default=None): - if self.local is None: - return default - - return default +def context_get_ident(): + try: + return context_ident.get() + except LookupError: + ident = "context-%s" % next(context_ident_counter) + context_ident.set(ident) + return ident - def context_get_ident(): - return 1 - def is_context_enabled(): +def is_context_enabled(): + try: + context_ident.get() + return True + except LookupError: return False diff --git a/logbook/handlers.py b/logbook/handlers.py index 25a2aab..155bd14 100644 --- a/logbook/handlers.py +++ b/logbook/handlers.py @@ -9,23 +9,17 @@ """ import errno import gzip -import io import math import os import re import socket import stat import sys - -try: - from hashlib import sha1 
-except ImportError: - from sha import new as sha1 - -import collections import traceback from collections import deque +from collections.abc import Iterable, Mapping from datetime import datetime, timedelta +from hashlib import sha1 from textwrap import dedent from logbook.base import ( @@ -46,31 +40,16 @@ lookup_level, ) from logbook.concurrency import new_fine_grained_lock -from logbook.helpers import ( - PY2, - _is_text_stream, - b, - collections_abc, - integer_types, - is_unicode, - rename, - reraise, - string_types, - u, - with_metaclass, - xrange, - zip, -) +from logbook.helpers import rename -DEFAULT_FORMAT_STRING = u( +DEFAULT_FORMAT_STRING = ( "[{record.time:%Y-%m-%d %H:%M:%S.%f%z}] " "{record.level_name}: {record.channel}: {record.message}" ) -SYSLOG_FORMAT_STRING = u("{record.channel}: {record.message}") +SYSLOG_FORMAT_STRING = "{record.channel}: {record.message}" NTLOG_FORMAT_STRING = dedent( - u( - """ + """ Message Level: {record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} @@ -81,13 +60,11 @@ {record.message} """ - ) ).lstrip() -TEST_FORMAT_STRING = u("[{record.level_name}] {record.channel}: {record.message}") +TEST_FORMAT_STRING = "[{record.level_name}] {record.channel}: {record.message}" MAIL_FORMAT_STRING = dedent( - u( - """ + """ Subject: {handler.subject} Message type: {record.level_name} @@ -100,19 +77,16 @@ {record.message} """ - ) ).lstrip() MAIL_RELATED_FORMAT_STRING = dedent( - u( - """ + """ Message type: {record.level_name} Location: {record.filename}:{record.lineno} Module: {record.module} Function: {record.func_name} {record.message} """ - ) ).lstrip() SYSLOG_PORT = 514 @@ -159,7 +133,7 @@ def _magic_del(self): return type.__new__(cls, name, bases, d) -class Handler(with_metaclass(_HandlerType), ContextObject): +class Handler(ContextObject, metaclass=_HandlerType): """Handler instances dispatch logging events to specific destinations. The base handler class. 
Acts as a placeholder which defines the Handler @@ -338,7 +312,7 @@ def handle_error(self, record, exc_info): try: behaviour = Flags.get_flag("errors", "print") if behaviour == "raise": - reraise(exc_info[0], exc_info[1], exc_info[2]) + raise exc_info[1] elif behaviour == "print": traceback.print_exception(*(exc_info + (None, sys.stderr))) sys.stderr.write( @@ -436,7 +410,7 @@ def __call__(self, record, handler): line = self.format_record(record, handler) exc = self.format_exception(record) if exc: - line += u("\n") + exc + line += "\n" + exc return line @@ -482,9 +456,9 @@ def hash_record_raw(self, record): """Returns a hashlib object with the hash of the record.""" hash = sha1() hash.update(("%d\x00" % record.level).encode("ascii")) - hash.update((record.channel or u("")).encode("utf-8") + b("\x00")) - hash.update(record.filename.encode("utf-8") + b("\x00")) - hash.update(b(str(record.lineno))) + hash.update((record.channel or "").encode("utf-8") + b"\x00") + hash.update(record.filename.encode("utf-8") + b"\x00") + hash.update(str(record.lineno).encode("utf-8")) return hash def hash_record(self, record): @@ -497,9 +471,6 @@ def hash_record(self, record): return self.hash_record_raw(record).hexdigest() -_NUMBER_TYPES = integer_types + (float,) - - class LimitingHandlerMixin(HashingHandlerMixin): """Mixin class for handlers that want to limit emitting records. 
@@ -519,7 +490,7 @@ def __init__(self, record_limit, record_delta): self._record_limits = {} if record_delta is None: record_delta = timedelta(seconds=60) - elif isinstance(record_delta, _NUMBER_TYPES): + elif isinstance(record_delta, (int, float)): record_delta = timedelta(seconds=record_delta) self.record_delta = record_delta @@ -622,16 +593,7 @@ def flush(self): def encode(self, msg): """Encodes the message to the stream encoding.""" - stream = self.stream - rv = msg + "\n" - if (PY2 and is_unicode(rv)) or not ( - PY2 or is_unicode(rv) or _is_text_stream(stream) - ): - enc = self.encoding - if enc is None: - enc = getattr(stream, "encoding", None) or "utf-8" - rv = rv.encode(enc, "replace") - return rv + return msg + "\n" def write(self, item): """Writes a bytestring to the stream.""" @@ -946,7 +908,7 @@ def should_rollover(self, record, bytes): def perform_rollover(self): self.stream.close() - for x in xrange(self.backup_count - 1, 0, -1): + for x in range(self.backup_count - 1, 0, -1): src = "%s.%d" % (self._filename, x) dst = "%s.%d" % (self._filename, x + 1) try: @@ -1348,7 +1310,7 @@ class MailHandler(Handler, StringFormatterHandlerMixin, LimitingHandlerMixin): default_format_string = MAIL_FORMAT_STRING default_related_format_string = MAIL_RELATED_FORMAT_STRING - default_subject = u("Server Error in Application") + default_subject = "Server Error in Application" #: the maximum number of record hashes in the cache for the limiting #: feature. Afterwards, record_cache_prune percent of the oldest @@ -1517,10 +1479,10 @@ def get_connection(self): # - tuple to be unpacked to variables keyfile and certfile. # - secure=() equivalent to secure=True for backwards compatibility. # - secure=False equivalent to secure=None to disable. 
- if isinstance(self.secure, collections_abc.Mapping): + if isinstance(self.secure, Mapping): keyfile = self.secure.get("keyfile", None) certfile = self.secure.get("certfile", None) - elif isinstance(self.secure, collections_abc.Iterable): + elif isinstance(self.secure, Iterable): # Allow empty tuple for backwards compatibility if len(self.secure) == 0: keyfile = certfile = None @@ -1543,7 +1505,7 @@ def get_connection(self): con.ehlo() # Allow credentials to be a tuple or dict. - if isinstance(self.credentials, collections_abc.Mapping): + if isinstance(self.credentials, Mapping): credentials_args = () credentials_kwargs = self.credentials else: @@ -1726,7 +1688,7 @@ def __init__( self.facility = facility self.socktype = socktype - if isinstance(address, string_types): + if isinstance(address, str): self._connect_unixsocket() self.enveloper = self.unix_envelope default_delimiter = "\x00" diff --git a/logbook/helpers.py b/logbook/helpers.py index daccfd9..233f2eb 100644 --- a/logbook/helpers.py +++ b/logbook/helpers.py @@ -15,75 +15,6 @@ import time from datetime import datetime, timedelta -PY2 = sys.version_info[0] == 2 - -if PY2: - import collections as collections_abc - - import __builtin__ as _builtins -else: - import builtins as _builtins - import collections.abc as collections_abc - -try: - import json -except ImportError: - import simplejson as json - -if PY2: - from cStringIO import StringIO - - iteritems = dict.iteritems - from itertools import izip as zip - - xrange = _builtins.xrange -else: - from io import StringIO - - zip = _builtins.zip - xrange = range - iteritems = dict.items - -_IDENTITY = lambda obj: obj - -if PY2: - - def u(s): - return unicode(s, "unicode_escape") - -else: - u = _IDENTITY - -if PY2: - integer_types = (int, long) - string_types = (basestring,) -else: - integer_types = (int,) - string_types = (str,) - -if PY2: - import httplib as http_client -else: - from http import client as http_client - -if PY2: - # Yucky, but apparently that's 
the only way to do this - exec( - """ -def reraise(tp, value, tb=None): - raise tp, value, tb -""", - locals(), - globals(), - ) -else: - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - - # this regexp also matches incompatible dates like 20070101 because # some libraries (like the python xmlrpclib modules) use this _iso8601_re = re.compile( @@ -93,22 +24,6 @@ def reraise(tp, value, tb=None): r"(?:T(\d{2}):(\d{2})(?::(\d{2}(?:\.\d+)?))?(Z|[+-]\d{2}:\d{2})?)?$" ) _missing = object() -if PY2: - - def b(x): - return x - - def _is_text_stream(x): - return True - -else: - import io - - def b(x): - return x.encode("ascii") - - def _is_text_stream(stream): - return isinstance(stream, io.TextIOBase) can_rename_open_file = False @@ -121,11 +36,6 @@ def _is_text_stream(stream): _MoveFileEx = ctypes.windll.kernel32.MoveFileExW def _rename(src, dst): - if PY2: - if not isinstance(src, unicode): - src = unicode(src, sys.getfilesystemencoding()) - if not isinstance(dst, unicode): - dst = unicode(dst, sys.getfilesystemencoding()) if _rename_atomic(src, dst): return True retry = 0 @@ -203,7 +113,7 @@ def rename(src, dst): rename = os.rename can_rename_open_file = True -_JSON_SIMPLE_TYPES = (bool, float) + integer_types + string_types +_JSON_SIMPLE_TYPES = (bool, float, int, str) def to_safe_json(data): @@ -214,8 +124,6 @@ def to_safe_json(data): def _convert(obj): if obj is None: return None - elif PY2 and isinstance(obj, str): - return obj.decode("utf-8", "replace") elif isinstance(obj, _JSON_SIMPLE_TYPES): return obj elif isinstance(obj, datetime): @@ -226,11 +134,11 @@ def _convert(obj): return tuple(_convert(x) for x in obj) elif isinstance(obj, dict): rv = {} - for key, value in iteritems(obj): - if not isinstance(key, string_types): + for key, value in obj.items(): + if not isinstance(key, str): key = str(key) - if not is_unicode(key): - key = u(key) + if not isinstance(key, str): + key = key rv[key] = 
_convert(value) return rv @@ -310,28 +218,3 @@ def __get__(self, obj, type=None): def get_iterator_next_method(it): return lambda: next(it) - - -# python 2 support functions and aliases -def is_unicode(x): - if PY2: - return isinstance(x, unicode) - return isinstance(x, str) - - -if PY2: - exec( - """def with_metaclass(meta): - class _WithMetaclassBase(object): - __metaclass__ = meta - return _WithMetaclassBase -""" - ) -else: - exec( - """def with_metaclass(meta): - class _WithMetaclassBase(object, metaclass=meta): - pass - return _WithMetaclassBase -""" - ) diff --git a/logbook/more.py b/logbook/more.py index b3e6648..4bff87b 100644 --- a/logbook/more.py +++ b/logbook/more.py @@ -12,6 +12,7 @@ import re from collections import defaultdict from functools import partial +from urllib.parse import parse_qsl, urlencode from logbook._termcolors import colorize from logbook.base import ERROR, NOTICE, NOTSET, RecordDispatcher, dispatch_record @@ -21,7 +22,6 @@ StringFormatter, StringFormatterHandlerMixin, ) -from logbook.helpers import PY2, iteritems, string_types, u from logbook.ticketing import BackendBase from logbook.ticketing import TicketingHandler as DatabaseHandler @@ -32,16 +32,8 @@ riemann_client = None # from riemann_client.transport import TCPTransport, UDPTransport, BlankTransport - -if PY2: - from urllib import urlencode - - from urlparse import parse_qsl -else: - from urllib.parse import parse_qsl, urlencode - _ws_re = re.compile(r"(\s+)", re.UNICODE) -TWITTER_FORMAT_STRING = u("[{record.channel}] {record.level_name}: {record.message}") +TWITTER_FORMAT_STRING = "[{record.channel}] {record.level_name}: {record.message}" TWITTER_ACCESS_TOKEN_URL = "https://twitter.com/oauth/access_token" NEW_TWEET_URL = "https://api.twitter.com/1/statuses/update.json" @@ -52,7 +44,7 @@ class CouchDBBackend(BackendBase): def setup_backend(self): from couchdb import Server - uri = self.options.pop("uri", u("")) + uri = self.options.pop("uri", "") couch = Server(uri) db_name = 
self.options.pop("db") self.database = couch[db_name] @@ -76,7 +68,7 @@ class TwitterFormatter(StringFormatter): max_length = 140 def format_exception(self, record): - return u("%s: %s") % (record.exception_shortname, record.exception_message) + return f"{record.exception_shortname}: {record.exception_message}" def __call__(self, record, handler): formatted = StringFormatter.__call__(self, record, handler) @@ -86,10 +78,10 @@ def __call__(self, record, handler): length += len(piece) if length > self.max_length: if length - len(piece) < self.max_length: - rv.append(u("ā€¦")) + rv.append("ā€¦") break rv.append(piece) - return u("").join(rv) + return "".join(rv) class TaggingLogger(RecordDispatcher): @@ -119,7 +111,7 @@ def __init__(self, name=None, tags=None): setattr(self, tag, partial(self.log, tag)) def log(self, tags, msg, *args, **kwargs): - if isinstance(tags, string_types): + if isinstance(tags, str): tags = [tags] exc_info = kwargs.pop("exc_info", None) extra = kwargs.pop("extra", {}) @@ -149,7 +141,7 @@ def __init__(self, handlers, filter=None, bubble=False): assert isinstance(handlers, dict) self._handlers = { tag: isinstance(handler, Handler) and [handler] or handler - for (tag, handler) in iteritems(handlers) + for (tag, handler) in handlers.items() } def emit(self, record): diff --git a/logbook/notifiers.py b/logbook/notifiers.py index 6c6df4e..26c6176 100644 --- a/logbook/notifiers.py +++ b/logbook/notifiers.py @@ -10,16 +10,13 @@ import base64 import os import sys +from http import client as http_client from time import time +from urllib.parse import parse_qsl, urlencode from logbook.base import ERROR, NOTSET, WARNING from logbook.handlers import Handler, LimitingHandlerMixin -from logbook.helpers import PY2, get_application_name, http_client, u - -if PY2: - from urllib import urlencode -else: - from urllib.parse import urlencode +from logbook.helpers import get_application_name def create_notification_handler(application_name=None, level=NOTSET, 
icon=None): @@ -52,7 +49,7 @@ def __init__( def make_title(self, record): """Called to get the title from the record.""" - return u("%s: %s") % (record.channel, record.level_name.title()) + return f"{record.channel}: {record.level_name.title()}" def make_text(self, record): """Called to get the text of the record.""" @@ -275,9 +272,7 @@ def emit(self, record): "/notifications/", headers={ "Authorization": "Basic " - + base64.b64encode( - (u("%s:%s") % (self.email, self.password)).encode("utf-8") - ).strip(), + + base64.b64encode(f"{self.email}:{self.password}".encode()).strip(), }, body=body, ) diff --git a/logbook/queues.py b/logbook/queues.py index cd00c29..4d1df23 100644 --- a/logbook/queues.py +++ b/logbook/queues.py @@ -10,18 +10,12 @@ import json import platform import threading +from queue import Empty, Full +from queue import Queue as ThreadQueue from threading import Lock, Thread from logbook.base import NOTSET, LogRecord, dispatch_record from logbook.handlers import Handler, WrapperHandler -from logbook.helpers import PY2, u - -if PY2: - from Queue import Empty, Full - from Queue import Queue as ThreadQueue -else: - from queue import Empty, Full - from queue import Queue as ThreadQueue class RedisHandler(Handler): @@ -467,7 +461,7 @@ def __init__(self, uri=None, context=None, multi=False): self.socket = self.context.socket(zmq.SUB) if uri is not None: self.socket.connect(uri) - self.socket.setsockopt_unicode(zmq.SUBSCRIBE, u("")) + self.socket.setsockopt_unicode(zmq.SUBSCRIBE, "") def __del__(self): try: @@ -495,8 +489,7 @@ def recv(self, timeout=None): if not self._zmq.select([self.socket], [], [], timeout)[0]: return rv = self.socket.recv(self._zmq.NOBLOCK) - if not PY2: - rv = rv.decode("utf-8") + rv = rv.decode("utf-8") return LogRecord.from_dict(json.loads(rv)) diff --git a/logbook/ticketing.py b/logbook/ticketing.py index 3757434..510e0db 100644 --- a/logbook/ticketing.py +++ b/logbook/ticketing.py @@ -13,7 +13,7 @@ from logbook.base import NOTSET, 
LogRecord, level_name_property from logbook.handlers import Handler, HashingHandlerMixin -from logbook.helpers import PY2, b, cached_property, u +from logbook.helpers import cached_property class Ticket: @@ -216,9 +216,9 @@ def record_ticket(self, record, data, hash, app_id): self.tickets.insert().values( record_hash=hash, level=record.level, - channel=record.channel or u(""), - location=u("%s:%d") % (record.filename, record.lineno), - module=record.module or u(""), + channel=record.channel or "", + location="%s:%d" % (record.filename, record.lineno), + module=record.module or "", occurrence_count=0, solved=False, app_id=app_id, @@ -348,7 +348,7 @@ def setup_backend(self): from pymongo.errors import AutoReconnect _connection = None - uri = self.options.pop("uri", u("")) + uri = self.options.pop("uri", "") _connection_attempts = 0 parsed_uri = parse_uri(uri, Connection.PORT) @@ -399,9 +399,9 @@ def record_ticket(self, record, data, hash, app_id): doc = { "record_hash": hash, "level": record.level, - "channel": record.channel or u(""), - "location": u("%s:%d") % (record.filename, record.lineno), - "module": record.module or u(""), + "channel": record.channel or "", + "location": "%s:%d" % (record.filename, record.lineno), + "module": record.module or "", "occurrence_count": 0, "solved": False, "app_id": app_id, @@ -482,9 +482,8 @@ def hash_record_raw(self, record): hash = HashingHandlerMixin.hash_record_raw(self, record) if self.hash_salt is not None: hash_salt = self.hash_salt - if not PY2 or isinstance(hash_salt, unicode): - hash_salt = hash_salt.encode("utf-8") - hash.update(b("\x00") + hash_salt) + hash_salt = hash_salt.encode("utf-8") + hash.update(b"\x00" + hash_salt) return hash @@ -524,7 +523,7 @@ def __init__( **db_options, ): if hash_salt is None: - hash_salt = u("apphash-") + app_id + hash_salt = "apphash-" + app_id TicketingBaseHandler.__init__(self, hash_salt, level, filter, bubble) if backend is None: backend = self.default_backend diff --git 
a/logbook/utils.py b/logbook/utils.py index 7f676d0..db79f25 100644 --- a/logbook/utils.py +++ b/logbook/utils.py @@ -4,7 +4,6 @@ from contextlib import contextmanager from .base import DEBUG, Logger -from .helpers import string_types class _SlowContextNotifier: @@ -172,7 +171,7 @@ def deprecated(func=None, message=None): .. versionadded:: 0.12 """ - if isinstance(func, string_types): + if isinstance(func, str): assert message is None message = func func = None diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 50544b5..d136728 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -1,14 +1,10 @@ import asyncio -import pytest - import logbook -from logbook.concurrency import has_contextvars ITERATIONS = 100 -@pytest.mark.skipif(not has_contextvars, reason="Contexvars not available") def test_asyncio_context_management(logger): h1 = logbook.TestHandler() h2 = logbook.TestHandler() diff --git a/tests/test_file_handler.py b/tests/test_file_handler.py index 5524765..6014d39 100644 --- a/tests/test_file_handler.py +++ b/tests/test_file_handler.py @@ -7,7 +7,6 @@ import pytest import logbook -from logbook.helpers import u, xrange from .utils import LETTERS, capturing_stderr_context @@ -27,7 +26,7 @@ def test_file_handler(logfile, activation_strategy, logger): def test_file_handler_unicode(logfile, activation_strategy, logger): with capturing_stderr_context() as captured: with activation_strategy(logbook.FileHandler(logfile)): - logger.info(u("\u0431")) + logger.info("\u0431") assert not captured.getvalue() @@ -85,7 +84,7 @@ def test_rotating_file_handler(logfile, activation_strategy, logger): ) handler.format_string = "{record.message}" with activation_strategy(handler): - for c, x in zip(LETTERS, xrange(32)): + for c, x in zip(LETTERS, range(32)): logger.warn(c * 256) files = [x for x in os.listdir(os.path.dirname(logfile)) if x.startswith(basename)] files.sort() @@ -110,18 +109,18 @@ def fake_record(message, year, month, day, hour=0, minute=0, 
second=0): return lr with activation_strategy(handler): - for x in xrange(10): + for x in range(10): handler.handle(fake_record("First One", 2010, 1, 5, x + 1)) - for x in xrange(20): + for x in range(20): handler.handle(fake_record("Second One", 2010, 1, 6, x + 1)) - for x in xrange(10): + for x in range(10): handler.handle(fake_record("Third One", 2010, 1, 7, x + 1)) - for x in xrange(20): + for x in range(20): handler.handle(fake_record("Last One", 2010, 1, 8, x + 1)) files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith("trot")) - assert files == [f"trot-2010-01-0{i}.log" for i in xrange(5, 9)][-backup_count:] + assert files == [f"trot-2010-01-0{i}.log" for i in range(5, 9)][-backup_count:] with open(str(tmpdir.join("trot-2010-01-08.log"))) as f: assert f.readline().rstrip() == "[01:00] Last One" assert f.readline().rstrip() == "[02:00] Last One" @@ -149,18 +148,18 @@ def fake_record(message, year, month, day, hour=0, minute=0, second=0): return lr with activation_strategy(handler): - for x in xrange(10): + for x in range(10): handler.handle(fake_record("First One", 2010, 1, 5, x + 1)) - for x in xrange(20): + for x in range(20): handler.handle(fake_record("Second One", 2010, 1, 6, x + 1)) - for x in xrange(10): + for x in range(10): handler.handle(fake_record("Third One", 2010, 1, 7, x + 1)) - for x in xrange(20): + for x in range(20): handler.handle(fake_record("Last One", 2010, 1, 8, x + 1)) files = sorted(x for x in os.listdir(str(tmpdir)) if x.startswith("trot")) - assert files == [f"trot.log.2010-01-0{i}" for i in xrange(5, 9)][-backup_count:] + assert files == [f"trot.log.2010-01-0{i}" for i in range(5, 9)][-backup_count:] with open(str(tmpdir.join("trot.log.2010-01-08"))) as f: assert f.readline().rstrip() == "[01:00] Last One" assert f.readline().rstrip() == "[02:00] Last One" @@ -197,19 +196,19 @@ def fake_record(message, year, month, day, hour=0, minute=0, second=0): return lr with activation_strategy(handler): - for x in xrange(10): + 
for x in range(10): handler.handle(fake_record("First One", 2010, 1, 5, x + 1)) - for x in xrange(20): + for x in range(20): handler.handle(fake_record("Second One", 2010, 1, 6, x + 1)) - for x in xrange(10): + for x in range(10): handler.handle(fake_record("Third One", 2010, 1, 7, x + 1)) - for x in xrange(20): + for x in range(20): handler.handle(fake_record("Last One", 2010, 1, 8, x + 1)) computed_files = [x for x in os.listdir(str(tmpdir)) if x.startswith("trot")] expected_files = ["trot.log.2010-01-01"] if preexisting_file else [] - expected_files += [f"trot.log.2010-01-0{i}" for i in xrange(5, 8)] + expected_files += [f"trot.log.2010-01-0{i}" for i in range(5, 8)] expected_files += ["trot.log"] expected_files = expected_files[-backup_count:] diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 04d627c..d26e7bb 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -2,8 +2,6 @@ import pytest -from logbook.helpers import u - def test_jsonhelper(): from logbook.helpers import to_safe_json @@ -16,21 +14,21 @@ def __str__(self): [ None, "foo", - u("jƤger"), + "jƤger", 1, datetime(2000, 1, 1), - {"jƤger1": 1, u("jƤger2"): 2, Bogus(): 3, "invalid": object()}, + {"jƤger1": 1, "jƤger2": 2, Bogus(): 3, "invalid": object()}, object(), # invalid ] ) assert rv == [ None, - u("foo"), - u("jƤger"), + "foo", + "jƤger", 1, "2000-01-01T00:00:00Z", - {u("jƤger1"): 1, u("jƤger2"): 2, u("bogus"): 3, u("invalid"): None}, + {"jƤger1": 1, "jƤger2": 2, "bogus": 3, "invalid": None}, None, ] diff --git a/tests/test_logging_api.py b/tests/test_logging_api.py index 5a864cb..cbe70cb 100644 --- a/tests/test_logging_api.py +++ b/tests/test_logging_api.py @@ -4,7 +4,6 @@ import pytest import logbook -from logbook.helpers import iteritems, u, xrange def test_basic_logging(active_handler, logger): @@ -35,7 +34,7 @@ def test_exception_catching(active_handler, logger): def test_exception_catching_with_unicode(): """See https://github.com/getlogbook/logbook/issues/104""" 
try: - raise Exception(u("\u202a test \u202c")) + raise Exception("\u202a test \u202c") except: r = logbook.LogRecord("channel", "DEBUG", "test", exc_info=sys.exc_info()) r.exception_message @@ -62,7 +61,7 @@ def test_to_dict(logger, active_handler): exported = record.to_dict() record.close() imported = logbook.LogRecord.from_dict(exported) - for key, value in iteritems(record.__dict__): + for key, value in record.__dict__.items(): if key[0] == "_": continue assert value == getattr(imported, key) @@ -77,10 +76,10 @@ def test_pickle(active_handler, logger): record.pull_information() record.close() - for p in xrange(pickle.HIGHEST_PROTOCOL): + for p in range(pickle.HIGHEST_PROTOCOL): exported = pickle.dumps(record, p) imported = pickle.loads(exported) - for key, value in iteritems(record.__dict__): + for key, value in record.__dict__.items(): if key[0] == "_": continue imported_value = getattr(imported, key) diff --git a/tests/test_logging_compat.py b/tests/test_logging_compat.py index c371c25..74f08fa 100644 --- a/tests/test_logging_compat.py +++ b/tests/test_logging_compat.py @@ -1,11 +1,11 @@ import functools +from io import StringIO from random import randrange import pytest import logbook import logbook.compat -from logbook.helpers import StringIO from .utils import capturing_stderr_context diff --git a/tests/test_mail_handler.py b/tests/test_mail_handler.py index 41ee507..1119210 100644 --- a/tests/test_mail_handler.py +++ b/tests/test_mail_handler.py @@ -3,7 +3,6 @@ import sys import logbook -from logbook.helpers import u from .utils import capturing_stderr_context, make_fake_mail_handler @@ -18,7 +17,7 @@ def test_mail_handler(activation_strategy, logger): - subject = u("\xf8nicode") + subject = "\xf8nicode" handler = make_fake_mail_handler(subject=subject) with capturing_stderr_context() as fallback: with activation_strategy(handler): @@ -26,7 +25,7 @@ def test_mail_handler(activation_strategy, logger): try: 1 / 0 except Exception: - logger.exception(u("Viva 
la Espa\xf1a")) + logger.exception("Viva la Espa\xf1a") if not handler.mails: # if sending the mail failed, the reason should be on stderr @@ -44,7 +43,7 @@ def test_mail_handler(activation_strategy, logger): assert re.search(r"Location:.*%s" % re.escape(__file_without_pyc__), data) assert re.search(r"Module:\s+%s" % __name__, data) assert re.search(r"Function:\s+test_mail_handler", data) - body = u("Viva la Espa\xf1a") + body = "Viva la Espa\xf1a" if sys.version_info < (3, 0): body = body.encode("utf-8") assert body in data diff --git a/tests/test_more.py b/tests/test_more.py index 8a3a102..606964d 100644 --- a/tests/test_more.py +++ b/tests/test_more.py @@ -1,9 +1,9 @@ import sys +from io import StringIO import pytest import logbook -from logbook.helpers import StringIO from .utils import capturing_stderr_context, missing, require_module diff --git a/tests/test_queues.py b/tests/test_queues.py index fe0a52b..6a1a060 100644 --- a/tests/test_queues.py +++ b/tests/test_queues.py @@ -5,7 +5,6 @@ import pytest import logbook -from logbook.helpers import u from .utils import LETTERS, missing, require_module @@ -16,9 +15,9 @@ @require_module("zmq") def test_zeromq_handler(logger, handlers, subscriber): tests = [ - u("Logging something"), - u("Something with umlauts Ć¤Ć¶Ć¼"), - u("Something else for good measure"), + "Logging something", + "Something with umlauts Ć¤Ć¶Ć¼", + "Something else for good measure", ] for test in tests: for handler in handlers: diff --git a/tests/test_ticketing.py b/tests/test_ticketing.py index 276ac12..5022cb7 100644 --- a/tests/test_ticketing.py +++ b/tests/test_ticketing.py @@ -1,15 +1,10 @@ import os import sys - -try: - from thread import get_ident -except ImportError: - from _thread import get_ident +from _thread import get_ident import pytest import logbook -from logbook.helpers import xrange from .utils import require_module @@ -31,7 +26,7 @@ def test_basic_ticketing(logger): from logbook.ticketing import TicketingHandler with 
TicketingHandler("sqlite:///") as handler: - for x in xrange(5): + for x in range(5): logger.warn("A warning") sleep(0.2) logger.info("An error") diff --git a/tests/test_utils.py b/tests/test_utils.py index 53fe519..04453c5 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,4 +1,5 @@ from time import sleep +from unittest.mock import Mock, call import pytest @@ -13,11 +14,6 @@ _THRESHOLD = 0.1 -try: - from unittest.mock import Mock, call -except ImportError: - from unittest.mock import Mock, call - @pytest.mark.flaky(reruns=5) def test_logged_if_slow_reached(test_handler): diff --git a/tests/utils.py b/tests/utils.py index 5ee21d3..9ac1599 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -9,11 +9,11 @@ import os import sys from contextlib import contextmanager +from io import StringIO import pytest import logbook -from logbook.helpers import StringIO _missing = object() From 9d51f16d4768fcfdef1d4b9b217dd49d7a9c59c5 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 29 Jun 2023 01:20:31 +0200 Subject: [PATCH 24/70] Fix deprecated threading methods --- logbook/queues.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/logbook/queues.py b/logbook/queues.py index 4d1df23..e5807c4 100644 --- a/logbook/queues.py +++ b/logbook/queues.py @@ -85,7 +85,7 @@ def __init__( def _flush_task(self, time, stop_event): """Calls the method _flush_buffer every certain time.""" - while not self._stop_event.isSet(): + while not self._stop_event.is_set(): with self.lock: self._flush_buffer() self._stop_event.wait(time) @@ -272,7 +272,7 @@ def start(self): """Starts the task thread.""" self.running = True self._thread = Thread(target=self._target) - self._thread.setDaemon(True) + self._thread.daemon = True self._thread.start() def stop(self): @@ -636,7 +636,7 @@ def start(self): """Starts the task thread.""" self.running = True self._thread = Thread(target=self._target) - self._thread.setDaemon(True) + self._thread.daemon = True 
self._thread.start() def stop(self): From 5ae321e8ccb1799d611348af3b89c1b881c882aa Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 29 Jun 2023 01:21:57 +0200 Subject: [PATCH 25/70] Remove some remaining Python 2 stuff --- logbook/compat.py | 7 +------ logbook/concurrency.py | 19 ++++++------------- logbook/handlers.py | 5 ----- tests/conftest.py | 9 --------- tests/test_mail_handler.py | 2 -- tests/test_ticketing.py | 6 ------ tests/test_unicode.py | 9 +-------- tests/utils.py | 2 -- 8 files changed, 8 insertions(+), 51 deletions(-) diff --git a/logbook/compat.py b/logbook/compat.py index 079c2ef..f3fc2a8 100644 --- a/logbook/compat.py +++ b/logbook/compat.py @@ -222,11 +222,6 @@ def convert_time(self, dt): def convert_record(self, old_record): """Converts a record from logbook to logging.""" - if sys.version_info >= (2, 5): - # make sure 2to3 does not screw this up - optional_kwargs = {"func": getattr(old_record, "func_name")} - else: - optional_kwargs = {} record = logging.LogRecord( old_record.channel, self.convert_level(old_record.level), @@ -235,7 +230,7 @@ def convert_record(self, old_record): old_record.message, (), old_record.exc_info, - **optional_kwargs, + func=old_record.func_name, ) for key, value in old_record.extra.items(): record.__dict__.setdefault(key, value) diff --git a/logbook/concurrency.py b/logbook/concurrency.py index 3dc653e..3c0b100 100644 --- a/logbook/concurrency.py +++ b/logbook/concurrency.py @@ -1,5 +1,6 @@ from contextvars import ContextVar from itertools import count +from threading import current_thread has_gevent = True use_gevent = False @@ -31,25 +32,21 @@ def is_gevent_enabled(): return False +def thread_get_name(): + return current_thread().name + + if has_gevent: from gevent.monkey import get_original as _get_original ThreadLock = _get_original("threading", "Lock") ThreadRLock = _get_original("threading", "RLock") - try: - thread_get_ident = _get_original("threading", "get_ident") - except AttributeError: - # In 2.7, 
this is called _get_ident - thread_get_ident = _get_original("threading", "_get_ident") + thread_get_ident = _get_original("threading", "get_ident") thread_local = _get_original("threading", "local") from gevent.local import local as greenlet_local from gevent.lock import BoundedSemaphore from gevent.thread import get_ident as greenlet_get_ident - from gevent.threading import __threading__ - - def thread_get_name(): - return __threading__.currentThread().getName() class GreenletRLock: def __init__(self): @@ -141,10 +138,6 @@ def _is_owned(self): from _thread import get_ident as thread_get_ident from threading import Lock as ThreadLock from threading import RLock as ThreadRLock - from threading import currentThread - - def thread_get_name(): - return currentThread().getName() greenlet_get_ident = thread_get_ident diff --git a/logbook/handlers.py b/logbook/handlers.py index 155bd14..12b5fa8 100644 --- a/logbook/handlers.py +++ b/logbook/handlers.py @@ -1407,11 +1407,6 @@ def message_from_record(self, record, suppressed): "time(s) and was suppressed" % suppressed ) - # inconsistency in Python 2.5 - # other versions correctly return msg.get_payload() as str - if sys.version_info < (2, 6) and isinstance(body, unicode): - body = body.encode("utf-8") - msg.set_payload(body, "UTF-8") return msg diff --git a/tests/conftest.py b/tests/conftest.py index 3bbeedc..126ed86 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -112,12 +112,3 @@ def gevent(request): @request.addfinalizer def fin(): _disable_gevent() - - -def pytest_ignore_collect(path, config): - if "test_asyncio.py" in path.basename and ( - sys.version_info.major < 3 or sys.version_info.minor < 5 - ): - return True - - return False diff --git a/tests/test_mail_handler.py b/tests/test_mail_handler.py index 1119210..fc9c814 100644 --- a/tests/test_mail_handler.py +++ b/tests/test_mail_handler.py @@ -44,8 +44,6 @@ def test_mail_handler(activation_strategy, logger): assert re.search(r"Module:\s+%s" % __name__, 
data) assert re.search(r"Function:\s+test_mail_handler", data) body = "Viva la Espa\xf1a" - if sys.version_info < (3, 0): - body = body.encode("utf-8") assert body in data assert "\nTraceback (most" in data assert "1 / 0" in data diff --git a/tests/test_ticketing.py b/tests/test_ticketing.py index 5022cb7..e5786f6 100644 --- a/tests/test_ticketing.py +++ b/tests/test_ticketing.py @@ -12,13 +12,7 @@ if __file_without_pyc__.endswith(".pyc"): __file_without_pyc__ = __file_without_pyc__[:-1] -python_version = sys.version_info[:2] - -@pytest.mark.xfail( - os.name == "nt" and (python_version == (3, 2) or python_version == (3, 3)), - reason="Problem with in-memory sqlite on Python 3.2, 3.3 and Windows", -) @require_module("sqlalchemy") def test_basic_ticketing(logger): from time import sleep diff --git a/tests/test_unicode.py b/tests/test_unicode.py index a274685..487d405 100644 --- a/tests/test_unicode.py +++ b/tests/test_unicode.py @@ -1,16 +1,14 @@ import logbook -from .utils import capturing_stderr_context, require_py3 +from .utils import capturing_stderr_context -@require_py3 def test_default_format_unicode(logger): with capturing_stderr_context() as stream: logger.warn("\u2603") assert "WARNING: testlogger: \u2603" in stream.getvalue() -@require_py3 def test_default_format_encoded(logger): with capturing_stderr_context() as stream: # it's a string but it's in the right encoding so don't barf @@ -18,7 +16,6 @@ def test_default_format_encoded(logger): assert "WARNING: testlogger: \u2603" in stream.getvalue() -@require_py3 def test_default_format_bad_encoding(logger): with capturing_stderr_context() as stream: # it's a string, is wrong, but just dump it in the logger, @@ -28,7 +25,6 @@ def test_default_format_bad_encoding(logger): assert expected in stream.getvalue() -@require_py3 def test_custom_unicode_format_unicode(logger): format_string = "[{record.level_name}] " "{record.channel}: {record.message}" with capturing_stderr_context() as stream: @@ -37,7 +33,6 @@ def 
test_custom_unicode_format_unicode(logger): assert "[WARNING] testlogger: \u2603" in stream.getvalue() -@require_py3 def test_custom_string_format_unicode(logger): format_string = "[{record.level_name}] " "{record.channel}: {record.message}" with capturing_stderr_context() as stream: @@ -46,14 +41,12 @@ def test_custom_string_format_unicode(logger): assert "[WARNING] testlogger: \u2603" in stream.getvalue() -@require_py3 def test_unicode_message_encoded_params(logger): with capturing_stderr_context() as stream: logger.warn("\u2603 {0}", "\u2603".encode()) assert "WARNING: testlogger: \u2603 b'\\xe2\\x98\\x83'" in stream.getvalue() -@require_py3 def test_encoded_message_unicode_params(logger): with capturing_stderr_context() as stream: logger.warn("\u2603 {0}".encode(), "\u2603") diff --git a/tests/utils.py b/tests/utils.py index 9ac1599..f7c9443 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -30,8 +30,6 @@ def get_total_delta_seconds(delta): ) / 10**6 -require_py3 = pytest.mark.skipif(sys.version_info[0] < 3, reason="Requires Python 3") - appveyor = pytest.mark.skipif( os.environ.get("APPVEYOR") != "True", reason="AppVeyor CI test" ) From d67461503857c7c081d59d54c1f975281d01133b Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 29 Jun 2023 01:23:44 +0200 Subject: [PATCH 26/70] Fix __del__ when ZeroMQHandler is partially initialised --- logbook/queues.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/logbook/queues.py b/logbook/queues.py index e5807c4..fecb35a 100644 --- a/logbook/queues.py +++ b/logbook/queues.py @@ -252,7 +252,8 @@ def __del__(self): # not reachable. # If messages are pending on the socket, we wait 100ms for them to be # sent then we discard them. 
- self.close(linger=100) + if hasattr(self, "socket"): + self.close(linger=100) class ThreadController: From 6fdd5f25ffe223ad40659f316f56827a4be8a012 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 29 Jun 2023 01:25:34 +0200 Subject: [PATCH 27/70] Add ids to parameterized fixtures --- tests/conftest.py | 4 +++- tests/test_queues.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 126ed86..3dfdd4f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -98,7 +98,9 @@ def default_handler(request): pass else: - @pytest.fixture(scope="module", autouse=True, params=[False, True]) + @pytest.fixture( + scope="module", autouse=True, params=[False, True], ids=["nogevent", "gevent"] + ) def gevent(request): module_name = getattr(request.module, "__name__", "") if ( diff --git a/tests/test_queues.py b/tests/test_queues.py index 6a1a060..80bf497 100644 --- a/tests/test_queues.py +++ b/tests/test_queues.py @@ -395,6 +395,6 @@ def handlers_subscriber(multi): return handlers, subscriber -@pytest.fixture(params=[True, False]) +@pytest.fixture(params=[True, False], ids=["multi", "nomulti"]) def multi(request): return request.param From fb66a5f80c3b008f89c37d501d68fd53332b8710 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 29 Jun 2023 01:25:53 +0200 Subject: [PATCH 28/70] Test gevent in tox (and therefore CI) --- tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tox.ini b/tox.ini index 74fafc1..944ea11 100644 --- a/tox.ini +++ b/tox.ini @@ -5,6 +5,8 @@ envlist = py{37,38,39,310,311}{,-nospeedups},pypy,docs extras = all test +deps = + gevent set_env = nospeedups: DISABLE_LOGBOOK_CEXT_AT_RUNTIME=1 pass_env = From f37cb4375ce797a3710e749c260777c87f2e1c38 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 2 Jul 2023 23:23:33 +0100 Subject: [PATCH 29/70] Fix deprecated/unused imports --- logbook/concurrency.py | 4 ++-- logbook/notifiers.py | 2 +- tests/conftest.py | 2 -- 
tests/test_mail_handler.py | 7 +------ tests/test_ticketing.py | 5 +---- 5 files changed, 5 insertions(+), 15 deletions(-) diff --git a/logbook/concurrency.py b/logbook/concurrency.py index 3c0b100..e429fb7 100644 --- a/logbook/concurrency.py +++ b/logbook/concurrency.py @@ -134,10 +134,10 @@ def _is_owned(self): return self._owner == (thread_get_ident(), greenlet_get_ident()) else: - from _thread import _local as thread_local - from _thread import get_ident as thread_get_ident from threading import Lock as ThreadLock from threading import RLock as ThreadRLock + from threading import get_ident as thread_get_ident + from threading import local as thread_local greenlet_get_ident = thread_get_ident diff --git a/logbook/notifiers.py b/logbook/notifiers.py index 26c6176..4199adc 100644 --- a/logbook/notifiers.py +++ b/logbook/notifiers.py @@ -12,7 +12,7 @@ import sys from http import client as http_client from time import time -from urllib.parse import parse_qsl, urlencode +from urllib.parse import urlencode from logbook.base import ERROR, NOTSET, WARNING from logbook.handlers import Handler, LimitingHandlerMixin diff --git a/tests/conftest.py b/tests/conftest.py index 3dfdd4f..3dc4fab 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,3 @@ -import sys - import pytest import logbook diff --git a/tests/test_mail_handler.py b/tests/test_mail_handler.py index fc9c814..108ef00 100644 --- a/tests/test_mail_handler.py +++ b/tests/test_mail_handler.py @@ -1,16 +1,11 @@ import base64 import re -import sys +from unittest.mock import call, patch import logbook from .utils import capturing_stderr_context, make_fake_mail_handler -try: - from unittest.mock import Mock, call, patch -except ImportError: - from unittest.mock import Mock, call, patch - __file_without_pyc__ = __file__ if __file_without_pyc__.endswith(".pyc"): __file_without_pyc__ = __file_without_pyc__[:-1] diff --git a/tests/test_ticketing.py b/tests/test_ticketing.py index e5786f6..af988f6 100644 --- 
a/tests/test_ticketing.py +++ b/tests/test_ticketing.py @@ -1,8 +1,5 @@ import os -import sys -from _thread import get_ident - -import pytest +from threading import get_ident import logbook From ccfc4cfb8d746dc638693c0470a74d4dd51d0232 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 2 Jul 2023 23:31:20 +0100 Subject: [PATCH 30/70] Prefer contextvars ident over greenlet ident Without this, the following test fails: tests/test_asyncio.py::test_asyncio_context_management[gevent] I don't know how realistic such a scenario is, but it seems logical that if we prefer context_get_ident over thread_get_ident then the same should apply to greenlet_get_ident. --- logbook/_fallback.py | 6 +++--- logbook/_speedups.pyx | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/logbook/_fallback.py b/logbook/_fallback.py index 86275a2..c011f2b 100644 --- a/logbook/_fallback.py +++ b/logbook/_fallback.py @@ -162,10 +162,10 @@ def iter_context_objects(self): use_gevent = is_gevent_enabled() use_context = is_context_enabled() - if use_gevent: - tid = greenlet_get_ident() - elif use_context: + if use_context: tid = context_get_ident() + elif use_gevent: + tid = greenlet_get_ident() else: tid = thread_get_ident() diff --git a/logbook/_speedups.pyx b/logbook/_speedups.pyx index 994836a..b85961f 100644 --- a/logbook/_speedups.pyx +++ b/logbook/_speedups.pyx @@ -209,10 +209,10 @@ cdef class ContextStackManager: use_gevent = is_gevent_enabled() use_context = is_context_enabled() - if use_gevent: - tid = greenlet_get_ident() - elif use_context: + if use_context: tid = context_get_ident() + elif use_gevent: + tid = greenlet_get_ident() else: tid = thread_get_ident() From 6882b3bd867cdd62174c5c031a2678b988662e58 Mon Sep 17 00:00:00 2001 From: Mattijs Ugen <144798+akaIDIOT@users.noreply.github.com> Date: Thu, 6 Feb 2020 15:07:50 +0100 Subject: [PATCH 31/70] Transform exception instance into exc_info tuple on heavy_init Copies behaviour from Python's builtin logging 
module, allowing caller to pass a previously caught Exception instance to logger.exception through the exc_info kwarg. --- logbook/base.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/logbook/base.py b/logbook/base.py index 38d111b..998f1dc 100644 --- a/logbook/base.py +++ b/logbook/base.py @@ -487,6 +487,12 @@ def heavy_init(self): self.frame = sys._getframe(1) if self.exc_info is True: self.exc_info = sys.exc_info() + if isinstance(self.exc_info, BaseException): + self.exc_info = ( + type(self.exc_info), + self.exc_info, + self.exc_info.__traceback__, + ) def pull_information(self): """A helper function that pulls all frame-related information into From 583f1818ad801710a5035a2aebb6bb69846c2941 Mon Sep 17 00:00:00 2001 From: Mattijs Ugen <144798+akaIDIOT@users.noreply.github.com> Date: Thu, 6 Feb 2020 15:22:03 +0100 Subject: [PATCH 32/70] Test logging exception instance outside of except clause --- tests/test_log_record.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/test_log_record.py b/tests/test_log_record.py index 05f5a8d..de2c273 100644 --- a/tests/test_log_record.py +++ b/tests/test_log_record.py @@ -19,6 +19,19 @@ def test_exc_info_false(): assert not record.formatted_exception +def test_exc_info_exception_instance(logger): + with logbook.handlers.TestHandler() as handler: + try: + raise ValueError("error here") + except Exception as e: + error = e + logger.exception(exc_info=error) + [record] = handler.records + assert isinstance(record.exc_info, tuple) + assert len(record.exc_info) == 3 + assert "Traceback" in record.formatted_exception + + def test_extradict(active_handler, logger): logger.warn("Test warning") record = active_handler.records[0] From 44b864672884cbcfe9e828f93d7fa329af053446 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Tue, 4 Jul 2023 00:25:04 +0100 Subject: [PATCH 33/70] Support PathLike paths in FileHandler --- logbook/handlers.py | 2 +- tests/conftest.py | 17 ++++++++++++++--- 
tests/test_file_handler.py | 2 +- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/logbook/handlers.py b/logbook/handlers.py index 3da5d9f..d39662b 100644 --- a/logbook/handlers.py +++ b/logbook/handlers.py @@ -639,7 +639,7 @@ def __init__( StreamHandler.__init__( self, None, level, format_string, encoding, filter, bubble ) - self._filename = filename + self._filename = os.fspath(filename) self._mode = mode if delay: self.stream = None diff --git a/tests/conftest.py b/tests/conftest.py index 3dc4fab..85c049c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,5 @@ +from pathlib import Path + import pytest import logbook @@ -77,9 +79,18 @@ def activation_strategy(request): return request.param -@pytest.fixture -def logfile(tmpdir): - return str(tmpdir.join("logfile.log")) +class CustomPathLike: + def __init__(self, path): + self.path = path + + def __fspath__(self): + return self.path + + +@pytest.fixture(params=[Path, str, CustomPathLike]) +def logfile(tmp_path, request): + path = str(tmp_path / "logfile.log") + return request.param(path) @pytest.fixture diff --git a/tests/test_file_handler.py b/tests/test_file_handler.py index 6014d39..70ea2c4 100644 --- a/tests/test_file_handler.py +++ b/tests/test_file_handler.py @@ -55,7 +55,7 @@ def test_monitoring_file_handler(logfile, activation_strategy, logger): ) with activation_strategy(handler): logger.warn("warning message") - os.rename(logfile, logfile + ".old") + os.rename(logfile, os.fspath(logfile) + ".old") logger.warn("another warning message") handler.close() with open(logfile) as f: From 6ef4bfed15d5791cb5f9fc05512e3d522a9116c4 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 23 Jul 2023 01:24:59 +0100 Subject: [PATCH 34/70] src layout is better --- .gitignore | 2 +- MANIFEST.in | 2 +- docs/conf.py | 2 +- setup.cfg | 5 +++-- setup.py | 2 +- {logbook => src/logbook}/__init__.py | 0 {logbook => src/logbook}/__version__.py | 0 {logbook => src/logbook}/_fallback.py | 0 {logbook => 
src/logbook}/_speedups.pyx | 0 {logbook => src/logbook}/_termcolors.py | 0 {logbook => src/logbook}/base.py | 0 {logbook => src/logbook}/compat.py | 0 {logbook => src/logbook}/concurrency.py | 0 {logbook => src/logbook}/handlers.py | 0 {logbook => src/logbook}/helpers.py | 0 {logbook => src/logbook}/more.py | 0 {logbook => src/logbook}/notifiers.py | 0 {logbook => src/logbook}/queues.py | 0 {logbook => src/logbook}/ticketing.py | 0 {logbook => src/logbook}/utils.py | 0 20 files changed, 7 insertions(+), 6 deletions(-) rename {logbook => src/logbook}/__init__.py (100%) rename {logbook => src/logbook}/__version__.py (100%) rename {logbook => src/logbook}/_fallback.py (100%) rename {logbook => src/logbook}/_speedups.pyx (100%) rename {logbook => src/logbook}/_termcolors.py (100%) rename {logbook => src/logbook}/base.py (100%) rename {logbook => src/logbook}/compat.py (100%) rename {logbook => src/logbook}/concurrency.py (100%) rename {logbook => src/logbook}/handlers.py (100%) rename {logbook => src/logbook}/helpers.py (100%) rename {logbook => src/logbook}/more.py (100%) rename {logbook => src/logbook}/notifiers.py (100%) rename {logbook => src/logbook}/queues.py (100%) rename {logbook => src/logbook}/ticketing.py (100%) rename {logbook => src/logbook}/utils.py (100%) diff --git a/.gitignore b/.gitignore index 9e99917..8452c2a 100644 --- a/.gitignore +++ b/.gitignore @@ -60,7 +60,7 @@ target/ # Logbook specific / custom ignores .ropeproject -logbook/_speedups.c +src/logbook/_speedups.c env* .vagrant flycheck-* diff --git a/MANIFEST.in b/MANIFEST.in index c755652..19fd092 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,2 @@ -include MANIFEST.in Makefile CHANGES logbook/_speedups.pyx tox.ini LICENSE +include MANIFEST.in Makefile CHANGES src/logbook/_speedups.pyx tox.ini LICENSE recursive-include tests * diff --git a/docs/conf.py b/docs/conf.py index 8a1084a..ab487dc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,7 +50,7 @@ # # The short X.Y version. 
with open( - os.path.join(os.path.dirname(__file__), "..", "logbook", "__version__.py") + os.path.join(os.path.dirname(__file__), "../src/logbook/__version__.py") ) as version_file: # can't use import here... version = release = version_file.read().strip().split("=")[1].strip()[1:-1] diff --git a/setup.cfg b/setup.cfg index ca6025f..a5e522f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,8 +3,9 @@ version = attr: logbook.__version__ [options] packages = find: +package_dir = + =src zip_safe = False [options.packages.find] -exclude = - tests +where = src diff --git a/setup.py b/setup.py index cf06c4d..1069bfb 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ def status_msgs(*msgs): from Cython.Build import cythonize ext_modules = cythonize( - [Extension("logbook._speedups", sources=["logbook/_speedups.pyx"])], + [Extension("logbook._speedups", sources=["src/logbook/_speedups.pyx"])], language_level=3, ) diff --git a/logbook/__init__.py b/src/logbook/__init__.py similarity index 100% rename from logbook/__init__.py rename to src/logbook/__init__.py diff --git a/logbook/__version__.py b/src/logbook/__version__.py similarity index 100% rename from logbook/__version__.py rename to src/logbook/__version__.py diff --git a/logbook/_fallback.py b/src/logbook/_fallback.py similarity index 100% rename from logbook/_fallback.py rename to src/logbook/_fallback.py diff --git a/logbook/_speedups.pyx b/src/logbook/_speedups.pyx similarity index 100% rename from logbook/_speedups.pyx rename to src/logbook/_speedups.pyx diff --git a/logbook/_termcolors.py b/src/logbook/_termcolors.py similarity index 100% rename from logbook/_termcolors.py rename to src/logbook/_termcolors.py diff --git a/logbook/base.py b/src/logbook/base.py similarity index 100% rename from logbook/base.py rename to src/logbook/base.py diff --git a/logbook/compat.py b/src/logbook/compat.py similarity index 100% rename from logbook/compat.py rename to src/logbook/compat.py diff --git a/logbook/concurrency.py 
b/src/logbook/concurrency.py similarity index 100% rename from logbook/concurrency.py rename to src/logbook/concurrency.py diff --git a/logbook/handlers.py b/src/logbook/handlers.py similarity index 100% rename from logbook/handlers.py rename to src/logbook/handlers.py diff --git a/logbook/helpers.py b/src/logbook/helpers.py similarity index 100% rename from logbook/helpers.py rename to src/logbook/helpers.py diff --git a/logbook/more.py b/src/logbook/more.py similarity index 100% rename from logbook/more.py rename to src/logbook/more.py diff --git a/logbook/notifiers.py b/src/logbook/notifiers.py similarity index 100% rename from logbook/notifiers.py rename to src/logbook/notifiers.py diff --git a/logbook/queues.py b/src/logbook/queues.py similarity index 100% rename from logbook/queues.py rename to src/logbook/queues.py diff --git a/logbook/ticketing.py b/src/logbook/ticketing.py similarity index 100% rename from logbook/ticketing.py rename to src/logbook/ticketing.py diff --git a/logbook/utils.py b/src/logbook/utils.py similarity index 100% rename from logbook/utils.py rename to src/logbook/utils.py From 03b61dedf00ddc48ed23f1a47369b9ca741a61f6 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sat, 22 Jul 2023 14:21:42 +0100 Subject: [PATCH 35/70] group_reflected_property had a __del__ method instead of __delete__ This was found because Cython 3 complained about __del__ having the wrong signature. 
--- src/logbook/_speedups.pyx | 2 +- tests/test_speedups.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) create mode 100644 tests/test_speedups.py diff --git a/src/logbook/_speedups.pyx b/src/logbook/_speedups.pyx index b85961f..a4bb55a 100644 --- a/src/logbook/_speedups.pyx +++ b/src/logbook/_speedups.pyx @@ -62,7 +62,7 @@ cdef class group_reflected_property: def __set__(self, obj, value): setattr(obj, self._name, value) - def __del__(self, obj): + def __delete__(self, obj): delattr(obj, self._name) diff --git a/tests/test_speedups.py b/tests/test_speedups.py new file mode 100644 index 0000000..c502df9 --- /dev/null +++ b/tests/test_speedups.py @@ -0,0 +1,32 @@ +import importlib + +import pytest + + +@pytest.fixture(params=["speedups", "fallback"]) +def speedups_module(request): + mod_name = f"logbook._{request.param}" + try: + return importlib.import_module(mod_name) + except ImportError: + pytest.skip(f"{mod_name} is not available") + + +def test_group_reflected_property(speedups_module): + class Group: + foo = "group" + + class A: + foo = speedups_module.group_reflected_property("foo", "default") + + def __init__(self, group=None): + self.group = group + + a = A() + assert a.foo == "default" + a.group = Group() + assert a.foo == "group" + a.foo = "set" + assert a.foo == "set" + del a.foo + assert a.foo == "group" From 8dc3f300b1091df7a4bc2c4970a4e9cf5e94427a Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Wed, 26 Jul 2023 21:44:27 +0200 Subject: [PATCH 36/70] Require sqlalchemy>=1.4 I forgot to do this in #331 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a8a6773..de485d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ Documentation = "https://logbook.readthedocs.io" test = ["pytest>=6", "pytest-rerunfailures"] dev = ["Logbook[test]", "tox>=4"] execnet = ["execnet>=1.0.9"] -sqlalchemy = ["sqlalchemy"] +sqlalchemy = 
["sqlalchemy>=1.4"] redis = ["redis"] zmq = ["pyzmq"] jinja = ["Jinja2"] From 1c27771953742c6f534631768c893ac901192901 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Wed, 26 Jul 2023 22:18:17 +0200 Subject: [PATCH 37/70] Update README --- README.md | 27 ++++++++++----------------- 1 file changed, 10 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index d68501e..1bd3fcd 100644 --- a/README.md +++ b/README.md @@ -2,13 +2,10 @@ -| | | -| ------------------ | --------------------------- | -| Travis | [![Build Status][ti]][tl] | -| AppVeyor | [![Build Status][ai]][al] | -| Supported Versions | ![Supported Versions][vi] | -| Latest Version | [![Latest Version][pi]][pl] | -| Test Coverage | [![Test Coverage][ci]][cl] | +[![Latest Version][version-img]][pypi] +![Supported Python Versions][pyver-img] +[![GitHub Actions][gha-img]][gha] +[![PyPI Downloads][downloads-img]][pypi] Logbook is a nice logging replacement. @@ -16,13 +13,9 @@ It should be easy to setup, use and configure and support web applications :) For more information: https://logbook.readthedocs.org -[ti]: https://secure.travis-ci.org/getlogbook/logbook.svg?branch=master -[tl]: https://travis-ci.org/getlogbook/logbook -[ai]: https://ci.appveyor.com/api/projects/status/quu99exa26e06npp?svg=true -[vi]: https://img.shields.io/badge/python-2.7%2C3.5%2C3.6%2C3.7-green.svg -[di]: https://img.shields.io/pypi/dm/logbook.svg -[al]: https://ci.appveyor.com/project/vmalloc/logbook -[pi]: https://img.shields.io/pypi/v/logbook.svg -[pl]: https://pypi.org/pypi/Logbook -[ci]: https://coveralls.io/repos/getlogbook/logbook/badge.svg?branch=master&service=github -[cl]: https://coveralls.io/github/getlogbook/logbook?branch=master +[version-img]: https://img.shields.io/pypi/v/logbook.svg +[pypi]: https://pypi.org/pypi/Logbook +[gha-img]: https://img.shields.io/github/actions/workflow/status/getlogbook/logbook/main.yml +[gha]: https://github.com/getlogbook/logbook/actions +[downloads-img]: 
https://img.shields.io/pypi/dm/logbook +[pyver-img]: https://img.shields.io/pypi/pyversions/logbook From 7c6698afcb9e8b5fb942729e148ea5ffcda11300 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Wed, 26 Jul 2023 22:20:37 +0200 Subject: [PATCH 38/70] Start changelog for 1.6 --- CHANGES | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/CHANGES b/CHANGES index f990cd4..84d28df 100644 --- a/CHANGES +++ b/CHANGES @@ -1,6 +1,20 @@ Logbook Changelog ================= +Version 1.6.0 +------------- + +Released on + +- Dropped support for Python 2.7, 3.5, and 3.6. +- Uses pyproject.toml based build. +- Added nteventlog extra for NTEventLogHandler. +- Supports SQLAlchemy 1.4 and 2.0. +- Fix various deprecation warnings. +- exc_info arg may be a BaseException instance (thanks Mattijs Ugen) +- FileHandler supports path-like objects. +- Fixed bug which prevented compilation on Cython 3 + Version 1.5.1 ------------- From a5b8848408cd740c5849e3493a4b2e97a49cfff2 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Wed, 26 Jul 2023 22:25:29 +0200 Subject: [PATCH 39/70] Add cibuildwheel --- .github/workflows/wheels.yml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 .github/workflows/wheels.yml diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml new file mode 100644 index 0000000..580057c --- /dev/null +++ b/.github/workflows/wheels.yml @@ -0,0 +1,21 @@ +name: Build + +on: [push, pull_request] + +jobs: + build_wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-20.04, windows-2019, macos-11] + + steps: + - uses: actions/checkout@v3 + + - name: Build wheels + uses: pypa/cibuildwheel@v2.14.1 + + - uses: actions/upload-artifact@v3 + with: + path: ./wheelhouse/*.whl From 9ea83476eb0c20b11bc7b33042dc8f042f701839 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Wed, 26 Jul 2023 22:31:00 +0200 Subject: [PATCH 40/70] debug --- .github/workflows/wheels.yml | 2 ++ 
1 file changed, 2 insertions(+) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 580057c..ed050f0 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -15,6 +15,8 @@ jobs: - name: Build wheels uses: pypa/cibuildwheel@v2.14.1 + env: + CIBW_BUILD_VERBOSITY: "1" - uses: actions/upload-artifact@v3 with: From 51eca6ff228efcd7d813b98d5ab94cda6b168762 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Wed, 26 Jul 2023 22:32:00 +0200 Subject: [PATCH 41/70] Disable fail-fast --- .github/workflows/wheels.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index ed050f0..2dadef9 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -7,6 +7,7 @@ jobs: name: Build wheels on ${{ matrix.os }} runs-on: ${{ matrix.os }} strategy: + fail-fast: false matrix: os: [ubuntu-20.04, windows-2019, macos-11] From 547e50fc49ac2cbfbeabb8eb53a4e89252b07dff Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Wed, 26 Jul 2023 22:36:19 +0200 Subject: [PATCH 42/70] Disable PyPy build --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index de485d7..7476656 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,3 +47,6 @@ testpaths = ["tests"] [tool.isort] profile = "black" + +[tool.cibuildwheel] +skip = "pp*" From 3fb7a3d03d9bb40dcde980c9dba325281a77b59b Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Wed, 26 Jul 2023 22:53:34 +0200 Subject: [PATCH 43/70] Build arm wheels, disable intel i686 --- pyproject.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 7476656..9f6dffb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,3 +50,9 @@ profile = "black" [tool.cibuildwheel] skip = "pp*" + +[tool.cibuildwheel.macos] +archs = ["x86_64", "universal2", "arm64"] + +[tool.cibuildwheel.linux] +archs = ["x86_64", "aarch64"] From 
202e12c376fe8a8396445d173ff1ab89b19171bb Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 27 Jul 2023 00:02:08 +0200 Subject: [PATCH 44/70] Use split build/publish setup --- .github/workflows/pypi-publish.yml | 45 +++++++++++++++++++++++++++++ .github/workflows/wheel-builder.yml | 44 ++++++++++++++++++++++++++++ .github/workflows/wheels.yml | 24 --------------- 3 files changed, 89 insertions(+), 24 deletions(-) create mode 100644 .github/workflows/pypi-publish.yml create mode 100644 .github/workflows/wheel-builder.yml delete mode 100644 .github/workflows/wheels.yml diff --git a/.github/workflows/pypi-publish.yml b/.github/workflows/pypi-publish.yml new file mode 100644 index 0000000..894a2d0 --- /dev/null +++ b/.github/workflows/pypi-publish.yml @@ -0,0 +1,45 @@ +# This is based on pyca/cryptography but we use cibuildwheel +# https://github.com/pyca/cryptography/blob/50ae9623df9181e5d08bbca0791ae69af4d3d446/.github/workflows/pypi-publish.yml +name: Publish to PyPI + +on: + workflow_dispatch: + inputs: + run_id: + description: The run of wheel-builder to use for finding artifacts. + required: true + environment: + description: Which PyPI environment to upload to + required: true + type: choice + options: [testpypi, pypi] + workflow_run: + workflows: ["Wheel Builder"] + types: [completed] + +jobs: + publish: + runs-on: ubuntu-latest + # We're not actually verifying that the triggering push event was for a + # tag, because github doesn't expose enough information to do so. + # wheel-builder.yml currently only has push events for tags. 
+ if: github.event_name == 'workflow_dispatch' || (github.event.workflow_run.event == 'push' && github.event.workflow_run.conclusion == 'success') + environment: publish + permissions: + id-token: write + steps: + - uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 # v2.27.0 + with: + path: dist/ + run_id: ${{ github.event.inputs.run_id || github.event.workflow_run.id }} + + - uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 + if: github.event_name == 'workflow_run' || (github.event_name == 'workflow_dispatch' && github.event.inputs.environment == 'pypi') + with: + packages_dir: dist/ + + - uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 + if: github.event_name == 'workflow_dispatch' && github.event.inputs.environment == 'testpypi' + with: + repository_url: https://test.pypi.org/legacy/ + packages_dir: dist/ diff --git a/.github/workflows/wheel-builder.yml b/.github/workflows/wheel-builder.yml new file mode 100644 index 0000000..0c46fea --- /dev/null +++ b/.github/workflows/wheel-builder.yml @@ -0,0 +1,44 @@ +# This is based on pyca/cryptography but we use cibuildwheel +# https://github.com/pyca/cryptography/blob/50ae9623df9181e5d08bbca0791ae69af4d3d446/.github/workflows/wheel-builder.yml +name: Wheel Builder + +permissions: + contents: read +on: + workflow_dispatch: + inputs: + version: + description: The version (tag) to build + # Do not add any non-tag push events without updating pypi-publish.yml. If + # you do, it'll upload wheels to PyPI. 
+ push: + tags: + - "*" + pull_request: + paths: + - .github/workflows/wheel-builder.yml + - setup.py + - pyproject.toml + +jobs: + build_wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-20.04, windows-2019, macos-11] + + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + # The tag to build or the tag received by the tag event + ref: ${{ github.event.inputs.version || github.ref }} + persist-credentials: false + + - name: Build wheels + uses: pypa/cibuildwheel@f21bb8376a051ffb6cb5604b28ccaef7b90e8ab7 # v2.14.1 + + - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + with: + path: ./wheelhouse/*.whl diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml deleted file mode 100644 index 2dadef9..0000000 --- a/.github/workflows/wheels.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Build - -on: [push, pull_request] - -jobs: - build_wheels: - name: Build wheels on ${{ matrix.os }} - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-20.04, windows-2019, macos-11] - - steps: - - uses: actions/checkout@v3 - - - name: Build wheels - uses: pypa/cibuildwheel@v2.14.1 - env: - CIBW_BUILD_VERBOSITY: "1" - - - uses: actions/upload-artifact@v3 - with: - path: ./wheelhouse/*.whl From 6142367189fd82db29a90823ee6240881d7ad1ae Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 27 Jul 2023 00:02:51 +0200 Subject: [PATCH 45/70] Cannot build Linux aarch64 wheels on GitHub runners --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9f6dffb..e9d2254 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,4 +55,4 @@ skip = "pp*" archs = ["x86_64", "universal2", "arm64"] [tool.cibuildwheel.linux] -archs = ["x86_64", "aarch64"] +archs = ["x86_64"] From 361890c202138cbf16c147a9b1ddf62f120cef03 Mon Sep 17 00:00:00 2001 From: 
Frazer McLean Date: Thu, 27 Jul 2023 00:52:36 +0200 Subject: [PATCH 46/70] Move Cython source out of package We don't want _speedups.pyx or _speedups.c to be included in the wheel. I've also excluded speedups.c from the sdist since that's generated by Cython --- .gitignore | 2 +- MANIFEST.in | 3 ++- setup.py | 2 +- src/{logbook/_speedups.pyx => cython/speedups.pyx} | 0 4 files changed, 4 insertions(+), 3 deletions(-) rename src/{logbook/_speedups.pyx => cython/speedups.pyx} (100%) diff --git a/.gitignore b/.gitignore index 8452c2a..9e10a27 100644 --- a/.gitignore +++ b/.gitignore @@ -60,7 +60,7 @@ target/ # Logbook specific / custom ignores .ropeproject -src/logbook/_speedups.c +src/cython/speedups.c env* .vagrant flycheck-* diff --git a/MANIFEST.in b/MANIFEST.in index 19fd092..267d0e9 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,3 @@ -include MANIFEST.in Makefile CHANGES src/logbook/_speedups.pyx tox.ini LICENSE +include MANIFEST.in Makefile CHANGES src/cython/speedups.pyx tox.ini LICENSE +exclude src/cython/speedups.c recursive-include tests * diff --git a/setup.py b/setup.py index 1069bfb..a4042d5 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ def status_msgs(*msgs): from Cython.Build import cythonize ext_modules = cythonize( - [Extension("logbook._speedups", sources=["src/logbook/_speedups.pyx"])], + [Extension("logbook._speedups", sources=["src/cython/speedups.pyx"])], language_level=3, ) diff --git a/src/logbook/_speedups.pyx b/src/cython/speedups.pyx similarity index 100% rename from src/logbook/_speedups.pyx rename to src/cython/speedups.pyx From f338d06ad1921a4f3641cd61b94669ca8da3b2b1 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 27 Jul 2023 00:56:50 +0200 Subject: [PATCH 47/70] General MANIFEST.in improvement I tested the output of python -m build to direct these changes. 
--- MANIFEST.in | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index 267d0e9..642f112 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,6 @@ include MANIFEST.in Makefile CHANGES src/cython/speedups.pyx tox.ini LICENSE exclude src/cython/speedups.c -recursive-include tests * +graft benchmark +graft docs +graft tests +global-exclude *.pyc From c3fa44f82b0641ea1378fa43fe64fae563f3a649 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 27 Jul 2023 01:10:53 +0200 Subject: [PATCH 48/70] Build sdist --- .github/workflows/wheel-builder.yml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/.github/workflows/wheel-builder.yml b/.github/workflows/wheel-builder.yml index 0c46fea..ab67890 100644 --- a/.github/workflows/wheel-builder.yml +++ b/.github/workflows/wheel-builder.yml @@ -21,6 +21,26 @@ on: - pyproject.toml jobs: + sdist: + name: Build sdist + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # v3.5.3 + with: + # The tag to build or the tag received by the tag event + ref: ${{ github.event.inputs.version || github.ref }} + persist-credentials: false + + - run: python -m venv .venv + - name: Install Python dependencies + run: .venv/bin/pip install -U pip build + - name: Make sdist + run: .venv/bin/python -m build --sdist + - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + with: + name: sdist + path: dist/*.tar.gz + build_wheels: name: Build wheels on ${{ matrix.os }} runs-on: ${{ matrix.os }} @@ -41,4 +61,5 @@ jobs: - uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 with: + name: wheels path: ./wheelhouse/*.whl From a2f941e2afe82c67013e8a6385d8b71384b704f3 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 27 Jul 2023 01:18:58 +0200 Subject: [PATCH 49/70] Add better names to pypi-publish jobs --- .github/workflows/pypi-publish.yml | 9 ++++++--- 1 file changed, 6 
insertions(+), 3 deletions(-) diff --git a/.github/workflows/pypi-publish.yml b/.github/workflows/pypi-publish.yml index 894a2d0..f3ec587 100644 --- a/.github/workflows/pypi-publish.yml +++ b/.github/workflows/pypi-publish.yml @@ -28,17 +28,20 @@ jobs: permissions: id-token: write steps: - - uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 # v2.27.0 + - name: Download artifacts + uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 # v2.27.0 with: path: dist/ run_id: ${{ github.event.inputs.run_id || github.event.workflow_run.id }} - - uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 + - name: Publish to pypi.org + uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 if: github.event_name == 'workflow_run' || (github.event_name == 'workflow_dispatch' && github.event.inputs.environment == 'pypi') with: packages_dir: dist/ - - uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 + - name: Publish to test.pypi.org + uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 if: github.event_name == 'workflow_dispatch' && github.event.inputs.environment == 'testpypi' with: repository_url: https://test.pypi.org/legacy/ From 9be82c638f702edf9ca3d52cc5004d1487075f8e Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 27 Jul 2023 01:19:55 +0200 Subject: [PATCH 50/70] Update deprecated inputs to pypa/gh-action-pypi-publish --- .github/workflows/pypi-publish.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pypi-publish.yml b/.github/workflows/pypi-publish.yml index f3ec587..67ea505 100644 --- a/.github/workflows/pypi-publish.yml +++ b/.github/workflows/pypi-publish.yml @@ -38,11 +38,11 @@ jobs: uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 if: github.event_name == 'workflow_run' || (github.event_name == 
'workflow_dispatch' && github.event.inputs.environment == 'pypi') with: - packages_dir: dist/ + packages-dir: dist/ - name: Publish to test.pypi.org uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 if: github.event_name == 'workflow_dispatch' && github.event.inputs.environment == 'testpypi' with: - repository_url: https://test.pypi.org/legacy/ - packages_dir: dist/ + repository-url: https://test.pypi.org/legacy/ + packages-dir: dist/ From e363365e3833ad2649f28bfdd8e22b524fed6cf6 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 27 Jul 2023 01:32:50 +0200 Subject: [PATCH 51/70] Delete twitter-secrets.txt --- twitter-secrets.txt | 11 ----------- 1 file changed, 11 deletions(-) delete mode 100644 twitter-secrets.txt diff --git a/twitter-secrets.txt b/twitter-secrets.txt deleted file mode 100644 index 2a695b0..0000000 --- a/twitter-secrets.txt +++ /dev/null @@ -1,11 +0,0 @@ -Leaked Twitter Secrets - -Twitter for Android - xauth: yes - key: 3nVuSoBZnx6U4vzUxf5w - secret: Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys - -Echofon: - xauth: yes - key: yqoymTNrS9ZDGsBnlFhIuw - secret: OMai1whT3sT3XMskI7DZ7xiju5i5rAYJnxSEHaKYvEs From 3e9b3d4e3cb4b3c4dac3c1078bb982ec6a3435ca Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 27 Jul 2023 01:33:12 +0200 Subject: [PATCH 52/70] Delete make-release script Git tags show it isn't used anymore --- scripts/make-release.py | 159 ---------------------------------------- 1 file changed, 159 deletions(-) delete mode 100644 scripts/make-release.py diff --git a/scripts/make-release.py b/scripts/make-release.py deleted file mode 100644 index ae1a42b..0000000 --- a/scripts/make-release.py +++ /dev/null @@ -1,159 +0,0 @@ -#!/usr/bin/env python -""" - make-release - ~~~~~~~~~~~~ - - Helper script that performs a release. Does pretty much everything - automatically for us. - - :copyright: (c) 2011 by Armin Ronacher. - :license: BSD, see LICENSE for more details. 
-""" -import argparse -import os -import re -import sys -from datetime import date, datetime -from subprocess import PIPE, Popen - -_date_clean_re = re.compile(r"(\d+)(st|nd|rd|th)") - - -def parse_changelog(): - with open("CHANGES") as f: - lineiter = iter(f) - for line in lineiter: - match = re.search(r"^Version\s+(.*)", line.strip()) - if match is None: - continue - version = match.group(1).strip() - if lineiter.next().count("-") != len(match.group(0)): - continue - while 1: - change_info = lineiter.next().strip() - if change_info: - break - - match = re.search( - r"released on (\w+\s+\d+\w+\s+\d+)" r"(?:, codename (.*))?(?i)", - change_info, - ) - if match is None: - continue - - datestr, codename = match.groups() - return version, parse_date(datestr), codename - - -def bump_version(version): - try: - parts = map(int, version.split(".")) - except ValueError: - fail("Current version is not numeric") - parts[-1] += 1 - return ".".join(map(str, parts)) - - -def parse_date(string): - string = _date_clean_re.sub(r"\1", string) - return datetime.strptime(string, "%B %d %Y") - - -def set_filename_version(filename, version_number, pattern): - changed = [] - - def inject_version(match): - before, old, after = match.groups() - changed.append(True) - return before + version_number + after - - with open(filename) as f: - contents = re.sub( - r"^(\s*%s\s*=\s*')(.+?)(')(?sm)" % pattern, inject_version, f.read() - ) - - if not changed: - fail("Could not find %s in %s", pattern, filename) - - with open(filename, "w") as f: - f.write(contents) - - -def set_version(version): - info("Setting version to %s", version) - with open("logbook/__version__.py", "w") as f: - f.write(f"__version__ = {version!r}") - - -def fail(message, *args): - print >> sys.stderr, "Error:", message % args - sys.exit(1) - - -def info(message, *args): - print >> sys.stderr, message % args - - -def get_git_tags(): - return set(Popen(["git", "tag"], stdout=PIPE).communicate()[0].splitlines()) - - -def 
git_is_clean(): - return Popen(["git", "diff", "--quiet"]).wait() == 0 - - -def make_git_commit(message, *args): - message = message % args - Popen(["git", "commit", "-am", message]).wait() - - -def make_git_tag(tag): - info('Tagging "%s"', tag) - Popen(["git", "tag", tag]).wait() - - -parser = argparse.ArgumentParser("%prog [options]") -parser.add_argument("--no-upload", dest="upload", action="store_false", default=True) - - -def main(): - args = parser.parse_args() - - os.chdir(os.path.join(os.path.dirname(__file__), "..")) - - rv = parse_changelog() - if rv is None: - fail("Could not parse changelog") - - version, release_date, codename = rv - dev_version = bump_version(version) + "-dev" - - info( - "Releasing %s (codename %s, release date %s)", - version, - codename, - release_date.strftime("%d/%m/%Y"), - ) - tags = get_git_tags() - - if version in tags: - fail('Version "%s" is already tagged', version) - if release_date.date() != date.today(): - fail( - "Release date is not today ({} != {})".format( - release_date.date(), date.today() - ) - ) - - if not git_is_clean(): - fail("You have uncommitted changes in git") - - set_version(version) - make_git_commit("Bump version number to %s", version) - make_git_tag(version) - set_version(dev_version) - make_git_commit("Bump version number to %s", dev_version) - - -if __name__ == "__main__": - main() From fda35fde398aa3325da26092eb887707db6185d5 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 27 Jul 2023 01:34:47 +0200 Subject: [PATCH 53/70] Make benchmarks run on Python 3 --- benchmark/bench_logger_level_low.py | 2 +- benchmark/bench_logging_logger_level_low.py | 3 +-- benchmark/bench_logging_noop.py | 3 +-- benchmark/bench_logging_noop_filter.py | 3 +-- benchmark/bench_logging_stream_handler.py | 3 +-- benchmark/bench_noop.py | 2 +- benchmark/bench_noop_filter.py | 2 +- benchmark/bench_noop_filter_on_handler.py | 2 +- benchmark/bench_redirect_from_logging.py | 3 +-- benchmark/bench_redirect_to_logging.py | 3 
+-- benchmark/bench_stack_manipulation.py | 3 +-- benchmark/bench_stream_handler.py | 2 +- 12 files changed, 12 insertions(+), 19 deletions(-) diff --git a/benchmark/bench_logger_level_low.py b/benchmark/bench_logger_level_low.py index 53eba81..1ec47b4 100644 --- a/benchmark/bench_logger_level_low.py +++ b/benchmark/bench_logger_level_low.py @@ -1,5 +1,5 @@ """Benchmarks too low logger levels""" -from cStringIO import StringIO +from io import StringIO from logbook import ERROR, Logger, StreamHandler diff --git a/benchmark/bench_logging_logger_level_low.py b/benchmark/bench_logging_logger_level_low.py index 4005d1d..4e018ad 100644 --- a/benchmark/bench_logging_logger_level_low.py +++ b/benchmark/bench_logging_logger_level_low.py @@ -1,8 +1,7 @@ """Tests with a logging handler becoming a noop for comparison""" +from io import StringIO from logging import ERROR, StreamHandler, getLogger -from cStringIO import StringIO - log = getLogger("Testlogger") log.setLevel(ERROR) diff --git a/benchmark/bench_logging_noop.py b/benchmark/bench_logging_noop.py index 35a9716..ad942e1 100644 --- a/benchmark/bench_logging_noop.py +++ b/benchmark/bench_logging_noop.py @@ -1,8 +1,7 @@ """Tests with a logging handler becoming a noop for comparison""" +from io import StringIO from logging import ERROR, StreamHandler, getLogger -from cStringIO import StringIO - log = getLogger("Testlogger") diff --git a/benchmark/bench_logging_noop_filter.py b/benchmark/bench_logging_noop_filter.py index 5fb3eb9..13a6de5 100644 --- a/benchmark/bench_logging_noop_filter.py +++ b/benchmark/bench_logging_noop_filter.py @@ -1,8 +1,7 @@ """Tests with a filter disabling a handler for comparsion in logging""" +from io import StringIO from logging import Filter, StreamHandler, getLogger -from cStringIO import StringIO - log = getLogger("Testlogger") diff --git a/benchmark/bench_logging_stream_handler.py b/benchmark/bench_logging_stream_handler.py index ede4037..7935b80 100644 --- 
a/benchmark/bench_logging_stream_handler.py +++ b/benchmark/bench_logging_stream_handler.py @@ -1,8 +1,7 @@ """Tests the stream handler in logging""" +from io import StringIO from logging import Logger, StreamHandler -from cStringIO import StringIO - log = Logger("Test logger") diff --git a/benchmark/bench_noop.py b/benchmark/bench_noop.py index 1b4291e..3b9f6ba 100644 --- a/benchmark/bench_noop.py +++ b/benchmark/bench_noop.py @@ -1,5 +1,5 @@ """Test with no handler active""" -from cStringIO import StringIO +from io import StringIO from logbook import ERROR, Logger, NullHandler, StreamHandler diff --git a/benchmark/bench_noop_filter.py b/benchmark/bench_noop_filter.py index 264dcb6..38a7be0 100644 --- a/benchmark/bench_noop_filter.py +++ b/benchmark/bench_noop_filter.py @@ -1,4 +1,4 @@ -from cStringIO import StringIO +from io import StringIO from logbook import Logger, NullHandler, StreamHandler diff --git a/benchmark/bench_noop_filter_on_handler.py b/benchmark/bench_noop_filter_on_handler.py index 8b43b7c..255a42c 100644 --- a/benchmark/bench_noop_filter_on_handler.py +++ b/benchmark/bench_noop_filter_on_handler.py @@ -1,5 +1,5 @@ """Like the filter test, but with the should_handle implemented""" -from cStringIO import StringIO +from io import StringIO from logbook import Logger, NullHandler, StreamHandler diff --git a/benchmark/bench_redirect_from_logging.py b/benchmark/bench_redirect_from_logging.py index 4f7f4ac..1b4ab14 100644 --- a/benchmark/bench_redirect_from_logging.py +++ b/benchmark/bench_redirect_from_logging.py @@ -1,8 +1,7 @@ """Tests redirects from logging to logbook""" +from io import StringIO from logging import getLogger -from cStringIO import StringIO - from logbook import StreamHandler from logbook.compat import redirect_logging diff --git a/benchmark/bench_redirect_to_logging.py b/benchmark/bench_redirect_to_logging.py index b7f5a85..b19a296 100644 --- a/benchmark/bench_redirect_to_logging.py +++ b/benchmark/bench_redirect_to_logging.py @@ 
-1,8 +1,7 @@ """Tests redirects from logging to logbook""" +from io import StringIO from logging import StreamHandler, getLogger -from cStringIO import StringIO - from logbook.compat import LoggingHandler log = getLogger("Test logger") diff --git a/benchmark/bench_stack_manipulation.py b/benchmark/bench_stack_manipulation.py index f923a0a..671262a 100644 --- a/benchmark/bench_stack_manipulation.py +++ b/benchmark/bench_stack_manipulation.py @@ -1,8 +1,7 @@ """Tests basic stack manipulation performance""" +from io import StringIO from tempfile import NamedTemporaryFile -from cStringIO import StringIO - from logbook import ERROR, WARNING, FileHandler, Handler, NullHandler, StreamHandler diff --git a/benchmark/bench_stream_handler.py b/benchmark/bench_stream_handler.py index a82b8f0..f3ae875 100644 --- a/benchmark/bench_stream_handler.py +++ b/benchmark/bench_stream_handler.py @@ -1,5 +1,5 @@ """Tests the stream handler""" -from cStringIO import StringIO +from io import StringIO from logbook import Logger, StreamHandler From a78773eda13a03bc9d466468d40792fbf0355f26 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Thu, 27 Jul 2023 01:35:57 +0200 Subject: [PATCH 54/70] Delete Vagrantfile The listed Python versions say it all ;-) --- Vagrantfile | 25 ------------------------- 1 file changed, 25 deletions(-) delete mode 100644 Vagrantfile diff --git a/Vagrantfile b/Vagrantfile deleted file mode 100644 index bacb464..0000000 --- a/Vagrantfile +++ /dev/null @@ -1,25 +0,0 @@ -# -*- mode: ruby -*- -# vi: set ft=ruby : -PYTHON_VERSIONS = ["python2.6", "python2.7", "python3.3"] - -Vagrant::Config.run do |config| - config.vm.define :box do |config| - config.vm.box = "precise64" - config.vm.box_url = "http://files.vagrantup.com/precise64.box" - config.vm.host_name = "box" - config.vm.provision :shell, :inline => "sudo apt-get -y update" - config.vm.provision :shell, :inline => "sudo apt-get install -y python-software-properties" - config.vm.provision :shell, :inline => "sudo 
add-apt-repository -y ppa:fkrull/deadsnakes" - config.vm.provision :shell, :inline => "sudo apt-get update" - PYTHON_VERSIONS.each { |python_version| - config.vm.provision :shell, :inline => "sudo apt-get install -y " + python_version + " " + python_version + "-dev" - } - config.vm.provision :shell, :inline => "sudo apt-get install -y libzmq-dev wget libbluetooth-dev libsqlite3-dev" - config.vm.provision :shell, :inline => "wget http://python-distribute.org/distribute_setup.py -O /tmp/distribute_setup.py" - PYTHON_VERSIONS.each { |python_executable| - config.vm.provision :shell, :inline => python_executable + " /tmp/distribute_setup.py" - } - config.vm.provision :shell, :inline => "sudo easy_install tox==1.2" - config.vm.provision :shell, :inline => "sudo easy_install virtualenv==1.6.4" - end -end From 765ed8d9798ba0f849dc5941704c98745b7ca0aa Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Fri, 28 Jul 2023 00:20:07 +0200 Subject: [PATCH 55/70] Use .readthedocs.yaml --- .readthedocs.yaml | 13 +++++++++++++ pyproject.toml | 2 +- tox.ini | 10 +++++----- 3 files changed, 19 insertions(+), 6 deletions(-) create mode 100644 .readthedocs.yaml diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..3f5023c --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,13 @@ +version: 2 + +build: + os: ubuntu-20.04 + tools: + python: "3.11" + +python: + install: + - method: pip + path: . 
+ extra_requirements: + - docs diff --git a/pyproject.toml b/pyproject.toml index e9d2254..d6df1f3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ jinja = ["Jinja2"] compression = ["brotli"] all = ["Logbook[execnet,sqlalchemy,redis,zmq,jinja,compression,nteventlog]"] nteventlog = ["pywin32; platform_system == 'Windows'"] - +docs = ["Sphinx", "importlib_metadata; python_version < '3.8'"] [tool.pytest.ini_options] testpaths = ["tests"] diff --git a/tox.ini b/tox.ini index 944ea11..4073795 100644 --- a/tox.ini +++ b/tox.ini @@ -17,9 +17,9 @@ commands = pytest {posargs} [testenv:docs] -basepython = python3.7 -deps = - Sphinx>=1.3 +basepython = python3.11 +extras = + docs changedir = docs commands = sphinx-build -W -b html . _build/html @@ -27,8 +27,8 @@ commands = [gh-actions] python = - 3.7: py37, docs + 3.7: py37 3.8: py38 3.9: py39 3.10: py310 - 3.11: py311 + 3.11: py311, docs From 387c1022386e78c3887859fbb0b4c7efc6319168 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Fri, 28 Jul 2023 00:21:29 +0200 Subject: [PATCH 56/70] Update project links --- docs/index.rst | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/index.rst b/docs/index.rst index a93d260..6b86e5e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -47,10 +47,9 @@ Project Information .. cssclass:: toctree-l1 * `Download from PyPI`_ -* `Master repository on GitHub`_ +* `GitHub repository`_ * `Mailing list`_ -* IRC: ``#pocoo`` on freenode .. _Download from PyPI: https://pypi.org/project/Logbook -.. _Master repository on GitHub: https://github.com/getlogbook/logbook +.. _GitHub repository: https://github.com/getlogbook/logbook .. 
_Mailing list: https://groups.google.com/g/pocoo-libs From a1a79ac0213841098aa95112a549d8f78d2708f7 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Fri, 28 Jul 2023 00:21:37 +0200 Subject: [PATCH 57/70] Simpler Sphinx config --- docs/conf.py | 227 ++++++--------------------------------------------- 1 file changed, 23 insertions(+), 204 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ab487dc..00f8cc9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,232 +1,51 @@ +# Configuration file for the Sphinx documentation builder. # -# Logbook documentation build configuration file, created by -# sphinx-quickstart on Fri Jul 23 16:54:49 2010. -# -# This file is execfile()d with the current directory set to its containing -# dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html -import os import sys -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.extend((os.path.abspath("."), os.path.abspath(".."))) - -# -- General configuration ---------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx"] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix of source filenames. 
-source_suffix = ".rst" +if sys.version_info < (3, 8): + from importlib_metadata import distribution +else: + from importlib.metadata import distribution -# The encoding of source files. -# source_encoding = 'utf-8-sig' +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information -# The master toctree document. -master_doc = "index" - -# General information about the project. project = "Logbook" -copyright = "2010, Armin Ronacher, Georg Brandl" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -with open( - os.path.join(os.path.dirname(__file__), "../src/logbook/__version__.py") -) as version_file: - # can't use import here... - version = release = version_file.read().strip().split("=")[1].strip()[1:-1] - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' +project_copyright = "2010, Armin Ronacher, Georg Brandl" +version = release = distribution("Logbook").version -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration -# If true, '()' will be appended to :func: etc. cross-reference text. 
-# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.viewcode", +] -# The name of the Pygments (syntax highlighting) style to use. +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] pygments_style = "sphinx" -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output -# -- Options for HTML output -------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. html_theme = "sheet" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. html_theme_options = { "nosidebar": True, } - -# Add any paths that contain custom themes here, relative to this directory. html_theme_path = ["."] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". html_title = "Logbook" - -# A shorter title for the navigation bar. Default is the same as html_title. html_short_title = "Logbook " + release - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. 
-# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# html_add_permalinks = '' - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = '' - -# Output file base name for HTML help builder. -htmlhelp_basename = "Logbookdoc" - - -# -- Options for LaTeX output ------------------------------------------------- - -# The paper size ('letter' or 'a4'). 
-# latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -# latex_font_size = '10pt' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, -# documentclass [howto/manual]). -latex_documents = [ - ( - "index", - "Logbook.tex", - "Logbook Documentation", - "Armin Ronacher, Georg Brandl", - "manual", - ), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Additional stuff for the LaTeX preamble. -# latex_preamble = '' - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output ------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ("index", "logbook", "Logbook Documentation", ["Armin Ronacher, Georg Brandl"], 1) -] +# -- Extension configuration ------------------------------------------------- intersphinx_mapping = { "python": ("https://docs.python.org/3", None), From 2ab48eb0540290b630ccaa26520136fdfa75c353 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Fri, 28 Jul 2023 00:55:59 +0200 Subject: [PATCH 58/70] Fix docs build on latest Sphinx The templated part of the commented out HTML was still being rendered, and `logo` does not exist anymore. 
--- docs/sheet/layout.html | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/sheet/layout.html b/docs/sheet/layout.html index 1303614..33b71a1 100644 --- a/docs/sheet/layout.html +++ b/docs/sheet/layout.html @@ -11,7 +11,6 @@
From fa36a8e5bea99d1aa6e93448d31f47ec3b3c31f8 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Fri, 28 Jul 2023 01:23:23 +0200 Subject: [PATCH 59/70] Add missing changelogs for 1.5.2 and 1.5.3 --- CHANGES | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/CHANGES b/CHANGES index 84d28df..9461ca0 100644 --- a/CHANGES +++ b/CHANGES @@ -15,6 +15,20 @@ Released on - FileHandler supports path-like objects. - Fixed bug which prevented compilation on Cython 3 +Version 1.5.3 +------------- + +Released on October 16th, 2019 + +- Fixed deprecated imports from collections module. + +Version 1.5.2 +------------- + +Released on August 21st, 2019 + +- No changes + Version 1.5.1 ------------- From 467fd4e437272c4ec672e289aa04675116250bb6 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 16:02:52 +0200 Subject: [PATCH 60/70] Use better description --- .github/workflows/wheel-builder.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/wheel-builder.yml b/.github/workflows/wheel-builder.yml index ab67890..0107404 100644 --- a/.github/workflows/wheel-builder.yml +++ b/.github/workflows/wheel-builder.yml @@ -8,7 +8,7 @@ on: workflow_dispatch: inputs: version: - description: The version (tag) to build + description: The Git ref to build # Do not add any non-tag push events without updating pypi-publish.yml. If # you do, it'll upload wheels to PyPI. 
push: From 2d471012239ffa9d1cc68d94a3bfd4448cd16c04 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 16:15:40 +0200 Subject: [PATCH 61/70] Move artifacts to single directory for upload --- .github/workflows/pypi-publish.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pypi-publish.yml b/.github/workflows/pypi-publish.yml index 67ea505..852959c 100644 --- a/.github/workflows/pypi-publish.yml +++ b/.github/workflows/pypi-publish.yml @@ -31,9 +31,16 @@ jobs: - name: Download artifacts uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 # v2.27.0 with: - path: dist/ + path: artifacts/ run_id: ${{ github.event.inputs.run_id || github.event.workflow_run.id }} + - name: Move artifacts to dist/ + run: | + ls -lR artifacts/ + mkdir dist + mv artifacts/sdist/*.tar.gz dist/ + mv artifacts/wheels/*.whl dist/ + - name: Publish to pypi.org uses: pypa/gh-action-pypi-publish@f8c70e705ffc13c3b4d1221169b84f12a75d6ca8 # v1.8.8 if: github.event_name == 'workflow_run' || (github.event_name == 'workflow_dispatch' && github.event.inputs.environment == 'pypi') From c37995de875d6b60c0140890f02df07090f161ba Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 21:31:46 +0200 Subject: [PATCH 62/70] Resolve TODO about dropping Python 2 --- src/logbook/base.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/logbook/base.py b/src/logbook/base.py index 998f1dc..f9fb425 100644 --- a/src/logbook/base.py +++ b/src/logbook/base.py @@ -450,8 +450,7 @@ def __init__( #: where custom log processors can attach custom context sensitive #: data. - # TODO: Replace the lambda with str when we remove support for python 2 - self.extra = defaultdict(lambda: "", extra or ()) + self.extra = defaultdict(str, extra or ()) #: If available, optionally the interpreter frame that pulled the #: heavy init. This usually points to somewhere in the dispatcher. 
#: Might not be available for all calls and is removed when the log @@ -559,8 +558,7 @@ if isinstance(self.time, str): self.time = parse_iso8601(self.time) - # TODO: Replace the lambda with str when we remove support for python 2` - self.extra = defaultdict(lambda: "", self.extra) + self.extra = defaultdict(str, self.extra) return self def _format_message(self, msg, *args, **kwargs): From 19d57f820bdc95202dc3f1177cb7c04c76d8cdba Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 21:35:19 +0200 Subject: [PATCH 63/70] Use datetime.timestamp I note that LoggingHandler has a bug if set_datetime_format("local") is used. Maybe in Logbook 2 we can get rid of the naive-means-UTC default we currently have. --- src/logbook/compat.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/logbook/compat.py b/src/logbook/compat.py index f3fc2a8..c79fef1 100644 --- a/src/logbook/compat.py +++ b/src/logbook/compat.py @@ -12,12 +12,10 @@ import sys import warnings from collections.abc import Mapping -from datetime import date, datetime +from datetime import date, datetime, timezone import logbook -_epoch_ord = date(1970, 1, 1).toordinal() - def redirect_logging(set_root_logger_level=True): """Permanently redirects logging to the stdlib.
This also @@ -213,12 +211,10 @@ def convert_level(self, level): def convert_time(self, dt): """Converts a datetime object into a timestamp.""" - year, month, day, hour, minute, second = dt.utctimetuple()[:6] - days = date(year, month, 1).toordinal() - _epoch_ord + day - 1 - hours = days * 24 + hour - minutes = hours * 60 + minute - seconds = minutes * 60 + second - return seconds + if dt.tzinfo is None: + # Logbook uses naive datetimes to represent UTC (utcnow) + return dt.replace(tzinfo=timezone.utc).timestamp() + return dt.timestamp() def convert_record(self, old_record): """Converts a record from logbook to logging.""" From 085a5eddc555f555113303894b527c459788ea79 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 21:35:59 +0200 Subject: [PATCH 64/70] Use importlib --- tests/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/utils.py b/tests/utils.py index f7c9443..bde3b96 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -6,6 +6,7 @@ :license: BSD, see LICENSE for more details. 
""" import functools +import importlib import os import sys from contextlib import contextmanager @@ -40,7 +41,7 @@ def get_total_delta_seconds(delta): def require_module(module_name): found = True try: - __import__(module_name) + importlib.import_module(module_name) except ImportError: found = False From 7e90e9c593edc6a327595d46c699fa240f2b375d Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 21:37:29 +0200 Subject: [PATCH 65/70] Use timedelta.total_seconds() --- tests/test_logging_times.py | 6 ++---- tests/utils.py | 10 ---------- 2 files changed, 2 insertions(+), 14 deletions(-) diff --git a/tests/test_logging_times.py b/tests/test_logging_times.py index 006572b..a27bf06 100644 --- a/tests/test_logging_times.py +++ b/tests/test_logging_times.py @@ -4,8 +4,6 @@ import logbook -from .utils import get_total_delta_seconds - def test_timedate_format(activation_strategy, logger): """ @@ -29,7 +27,7 @@ def test_timedate_format(activation_strategy, logger): t1 = datetime.now() t2 = datetime.utcnow() - tz_minutes_diff = get_total_delta_seconds(t1 - t2) / 60.0 + tz_minutes_diff = (t1 - t2).total_seconds() / 60.0 if abs(tz_minutes_diff) < 1: pytest.skip( @@ -38,7 +36,7 @@ def test_timedate_format(activation_strategy, logger): ) # get the difference between LogRecord local and utc times - logbook_minutes_diff = get_total_delta_seconds(time_local - time_utc) / 60.0 + logbook_minutes_diff = (time_local - time_utc).total_seconds() / 60.0 assert abs(logbook_minutes_diff) > 1, ( "Localtime does not differ from UTC by more than 1 " "minute (Local: %s, UTC: %s)" % (time_local, time_utc) diff --git a/tests/utils.py b/tests/utils.py index bde3b96..7411fc4 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -21,16 +21,6 @@ LETTERS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" -def get_total_delta_seconds(delta): - """ - Replacement for datetime.timedelta.total_seconds() for Python 2.5, 2.6 - and 3.1 - """ - return ( - delta.microseconds + (delta.seconds 
+ delta.days * 24 * 3600) * 10**6 - ) / 10**6 - - appveyor = pytest.mark.skipif( os.environ.get("APPVEYOR") != "True", reason="AppVeyor CI test" ) From d755c7383cdbe5eb7951dd455a86f79cdc3092a1 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 21:37:41 +0200 Subject: [PATCH 66/70] Remove unused pytest markers --- tests/utils.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/utils.py b/tests/utils.py index 7411fc4..277d9be 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -7,7 +7,6 @@ """ import functools import importlib -import os import sys from contextlib import contextmanager from io import StringIO @@ -21,13 +20,6 @@ LETTERS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" -appveyor = pytest.mark.skipif( - os.environ.get("APPVEYOR") != "True", reason="AppVeyor CI test" -) - -travis = pytest.mark.skipif(os.environ.get("TRAVIS") != "true", reason="Travis CI test") - - def require_module(module_name): found = True try: From 6c54e0fb69e1b268b077862cb7c59a9ab49b3c0a Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 21:37:59 +0200 Subject: [PATCH 67/70] Fix implicit concatenation on same line after black reformat --- tests/test_unicode.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_unicode.py b/tests/test_unicode.py index 487d405..3a503c7 100644 --- a/tests/test_unicode.py +++ b/tests/test_unicode.py @@ -26,7 +26,7 @@ def test_default_format_bad_encoding(logger): def test_custom_unicode_format_unicode(logger): - format_string = "[{record.level_name}] " "{record.channel}: {record.message}" + format_string = "[{record.level_name}] {record.channel}: {record.message}" with capturing_stderr_context() as stream: with logbook.StderrHandler(format_string=format_string): logger.warn("\u2603") @@ -34,7 +34,7 @@ def test_custom_unicode_format_unicode(logger): def test_custom_string_format_unicode(logger): - format_string = "[{record.level_name}] " "{record.channel}: 
{record.message}" + format_string = "[{record.level_name}] {record.channel}: {record.message}" with capturing_stderr_context() as stream: with logbook.StderrHandler(format_string=format_string): logger.warn("\u2603") From f8d59764b5eefe9de2e08e678db69cd69872dc36 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 16:30:46 +0200 Subject: [PATCH 68/70] Update CHANGES --- CHANGES | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGES b/CHANGES index 9461ca0..1415358 100644 --- a/CHANGES +++ b/CHANGES @@ -14,6 +14,7 @@ Released on - exc_info arg may be a BaseException instance (thanks Mattijs Ugen) - FileHandler supports path-like objects. - Fixed bug which prevented compilation on Cython 3 +- Wheels are generated for more platforms and architectures Version 1.5.3 ------------- From 011cfc27ac1865d8403503e863b21aabc13cfad0 Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 21:56:14 +0200 Subject: [PATCH 69/70] Add release date --- CHANGES | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES b/CHANGES index 1415358..296cf89 100644 --- a/CHANGES +++ b/CHANGES @@ -4,7 +4,7 @@ Logbook Changelog Version 1.6.0 ------------- -Released on +Released on July 30th, 2023 - Dropped support for Python 2.7, 3.5, and 3.6. - Uses pyproject.toml based build. From 18d97a415516c90bdb7d96eb86e69a43b74942fb Mon Sep 17 00:00:00 2001 From: Frazer McLean Date: Sun, 30 Jul 2023 21:56:20 +0200 Subject: [PATCH 70/70] Bump version --- src/logbook/__version__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/logbook/__version__.py b/src/logbook/__version__.py index a06ff4e..e4adfb8 100644 --- a/src/logbook/__version__.py +++ b/src/logbook/__version__.py @@ -1 +1 @@ -__version__ = "1.5.3" +__version__ = "1.6.0"