From 5f85723f4693c7289724fdcda84cfc0b62da74d4 Mon Sep 17 00:00:00 2001 From: Alexandre Morignot Date: Tue, 28 Feb 2023 18:40:55 +0100 Subject: [PATCH 01/12] fix(JsonFormatter): type the constructor (#170) --- src/pythonjsonlogger/jsonlogger.py | 149 ++++++++++++++++++----------- tox.ini | 2 +- 2 files changed, 95 insertions(+), 56 deletions(-) diff --git a/src/pythonjsonlogger/jsonlogger.py b/src/pythonjsonlogger/jsonlogger.py index e250c7e..519a64d 100644 --- a/src/pythonjsonlogger/jsonlogger.py +++ b/src/pythonjsonlogger/jsonlogger.py @@ -1,15 +1,14 @@ -''' +""" This library is provided to allow standard python logging to output log data as JSON formatted strings -''' +""" import logging import json import re -from datetime import date, datetime, time, timezone import traceback import importlib - -from typing import Any, Dict, Optional, Union, List, Tuple +from datetime import date, datetime, time, timezone +from typing import Any, Callable, Dict, List, Optional, Tuple, Union from inspect import istraceback @@ -18,18 +17,38 @@ # skip natural LogRecord attributes # http://docs.python.org/library/logging.html#logrecord-attributes RESERVED_ATTRS: Tuple[str, ...] = ( - 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename', - 'funcName', 'levelname', 'levelno', 'lineno', 'module', - 'msecs', 'message', 'msg', 'name', 'pathname', 'process', - 'processName', 'relativeCreated', 'stack_info', 'thread', 'threadName') - + "args", + "asctime", + "created", + "exc_info", + "exc_text", + "filename", + "funcName", + "levelname", + "levelno", + "lineno", + "module", + "msecs", + "message", + "msg", + "name", + "pathname", + "process", + "processName", + "relativeCreated", + "stack_info", + "thread", + "threadName", +) + +OptionalCallableOrStr = Optional[Union[Callable, str]] def merge_record_extra( record: logging.LogRecord, target: Dict, reserved: Union[Dict, List], - rename_fields: Optional[Dict[str,str]] = None, + rename_fields: Optional[Dict[str, str]] = None, ) -> Dict: """ Merges extra attributes from LogRecord object into target dictionary @@ -44,10 +63,10 @@ def merge_record_extra( rename_fields = {} for key, value in record.__dict__.items(): # this allows to have numeric keys - if (key not in reserved - and not (hasattr(key, "startswith") - and key.startswith('_'))): - target[rename_fields.get(key,key)] = value + if key not in reserved and not ( + hasattr(key, "startswith") and key.startswith("_") + ): + target[rename_fields.get(key, key)] = value return target @@ -61,11 +80,9 @@ def default(self, obj): return self.format_datetime_obj(obj) elif istraceback(obj): - return ''.join(traceback.format_tb(obj)).strip() + return "".join(traceback.format_tb(obj)).strip() - elif type(obj) == Exception \ - or isinstance(obj, Exception) \ - or type(obj) == type: + elif type(obj) == Exception or isinstance(obj, Exception) or type(obj) == type: return str(obj) try: @@ -89,22 +106,34 @@ class JsonFormatter(logging.Formatter): json default encoder """ - def __init__(self, *args, **kwargs): + def __init__( + self, + *args: Any, + json_default: OptionalCallableOrStr = None, + json_encoder: OptionalCallableOrStr = None, + json_serialiser: Union[Callable, str] = json.dumps, + json_indent: Optional[Union[int, str]] = None, + json_ensure_ascii: bool = True, + prefix: str = "", + rename_fields: Optional[dict] = None, + static_fields: Optional[dict] = None, + reserved_attrs: Tuple[str, ...] 
= RESERVED_ATTRS, + timestamp: Union[bool, str] = False, + **kwargs: Any + ): """ :param json_default: a function for encoding non-standard objects as outlined in https://docs.python.org/3/library/json.html :param json_encoder: optional custom encoder :param json_serializer: a :meth:`json.dumps`-compatible callable that will be used to serialize the log record. - :param json_indent: an optional :meth:`json.dumps`-compatible numeric value - that will be used to customize the indent of the output json. + :param json_indent: indent parameter for json.dumps + :param json_ensure_ascii: ensure_ascii parameter for json.dumps :param prefix: an optional string prefix added at the beginning of the formatted string :param rename_fields: an optional dict, used to rename field names in the output. Rename message to @message: {'message': '@message'} :param static_fields: an optional dict, used to add fields with static values to all logs - :param json_indent: indent parameter for json.dumps - :param json_ensure_ascii: ensure_ascii parameter for json.dumps :param reserved_attrs: an optional list of fields that will be skipped when outputting json log record. Defaults to all log record attributes: http://docs.python.org/library/logging.html#logrecord-attributes @@ -113,17 +142,16 @@ def __init__(self, *args, **kwargs): to log record using string as key. If True boolean is passed, timestamp key will be "timestamp". Defaults to False/off. """ - self.json_default = self._str_to_fn(kwargs.pop("json_default", None)) - self.json_encoder = self._str_to_fn(kwargs.pop("json_encoder", None)) - self.json_serializer = self._str_to_fn(kwargs.pop("json_serializer", json.dumps)) - self.json_indent = kwargs.pop("json_indent", None) - self.json_ensure_ascii = kwargs.pop("json_ensure_ascii", True) - self.prefix = kwargs.pop("prefix", "") - self.rename_fields = kwargs.pop("rename_fields", {}) - self.static_fields = kwargs.pop("static_fields", {}) - reserved_attrs = kwargs.pop("reserved_attrs", RESERVED_ATTRS) + self.json_default = self._str_to_fn(json_default) + self.json_encoder = self._str_to_fn(json_encoder) + self.json_serializer = self._str_to_fn(json_serialiser) + self.json_indent = json_indent + self.json_ensure_ascii = json_ensure_ascii + self.prefix = prefix + self.rename_fields = rename_fields or {} + self.static_fields = static_fields or {} self.reserved_attrs = dict(zip(reserved_attrs, reserved_attrs)) - self.timestamp = kwargs.pop("timestamp", False) + self.timestamp = timestamp # super(JsonFormatter, self).__init__(*args, **kwargs) logging.Formatter.__init__(self, *args, **kwargs) @@ -131,8 +159,7 @@ def __init__(self, *args, **kwargs): self.json_encoder = JsonEncoder self._required_fields = self.parse() - self._skip_fields = dict(zip(self._required_fields, - self._required_fields)) + self._skip_fields = dict(zip(self._required_fields, self._required_fields)) self._skip_fields.update(self.reserved_attrs) def _str_to_fn(self, fn_as_str): @@ -146,7 +173,7 @@ def _str_to_fn(self, fn_as_str): if not isinstance(fn_as_str, str): return fn_as_str - path, _, function = fn_as_str.rpartition('.') + path, _, function = fn_as_str.rpartition(".") module = importlib.import_module(path) return getattr(module, function) @@ -158,22 +185,27 @@ def parse(self) -> List[str]: to include in all log messages. 
""" if isinstance(self._style, logging.StringTemplateStyle): - formatter_style_pattern = re.compile(r'\$\{(.+?)\}', re.IGNORECASE) + formatter_style_pattern = re.compile(r"\$\{(.+?)\}", re.IGNORECASE) elif isinstance(self._style, logging.StrFormatStyle): - formatter_style_pattern = re.compile(r'\{(.+?)\}', re.IGNORECASE) + formatter_style_pattern = re.compile(r"\{(.+?)\}", re.IGNORECASE) # PercentStyle is parent class of StringTemplateStyle and StrFormatStyle so # it needs to be checked last. elif isinstance(self._style, logging.PercentStyle): - formatter_style_pattern = re.compile(r'%\((.+?)\)', re.IGNORECASE) + formatter_style_pattern = re.compile(r"%\((.+?)\)", re.IGNORECASE) else: - raise ValueError('Invalid format: %s' % self._fmt) + raise ValueError("Invalid format: %s" % self._fmt) if self._fmt: return formatter_style_pattern.findall(self._fmt) else: return [] - def add_fields(self, log_record: Dict[str, Any], record: logging.LogRecord, message_dict: Dict[str, Any]) -> None: + def add_fields( + self, + log_record: Dict[str, Any], + record: logging.LogRecord, + message_dict: Dict[str, Any], + ) -> None: """ Override this method to implement custom logic for adding fields. """ @@ -182,10 +214,15 @@ def add_fields(self, log_record: Dict[str, Any], record: logging.LogRecord, mess log_record.update(self.static_fields) log_record.update(message_dict) - merge_record_extra(record, log_record, reserved=self._skip_fields, rename_fields=self.rename_fields) + merge_record_extra( + record, + log_record, + reserved=self._skip_fields, + rename_fields=self.rename_fields, + ) if self.timestamp: - key = self.timestamp if type(self.timestamp) == str else 'timestamp' + key = self.timestamp if type(self.timestamp) == str else "timestamp" log_record[key] = datetime.fromtimestamp(record.created, tz=timezone.utc) self._perform_rename_log_fields(log_record) @@ -204,11 +241,13 @@ def process_log_record(self, log_record): def jsonify_log_record(self, log_record): """Returns a json string of the log record.""" - return self.json_serializer(log_record, - default=self.json_default, - cls=self.json_encoder, - indent=self.json_indent, - ensure_ascii=self.json_ensure_ascii) + return self.json_serializer( + log_record, + default=self.json_default, + cls=self.json_encoder, + indent=self.json_indent, + ensure_ascii=self.json_ensure_ascii, + ) def serialize_log_record(self, log_record: Dict[str, Any]) -> str: """Returns the final representation of the log record.""" @@ -230,14 +269,14 @@ def format(self, record: logging.LogRecord) -> str: # Display formatted exception, but allow overriding it in the # user-supplied dict. 
- if record.exc_info and not message_dict.get('exc_info'): - message_dict['exc_info'] = self.formatException(record.exc_info) - if not message_dict.get('exc_info') and record.exc_text: - message_dict['exc_info'] = record.exc_text + if record.exc_info and not message_dict.get("exc_info"): + message_dict["exc_info"] = self.formatException(record.exc_info) + if not message_dict.get("exc_info") and record.exc_text: + message_dict["exc_info"] = record.exc_text # Display formatted record of stack frames # default format is a string returned from :func:`traceback.print_stack` - if record.stack_info and not message_dict.get('stack_info'): - message_dict['stack_info'] = self.formatStack(record.stack_info) + if record.stack_info and not message_dict.get("stack_info"): + message_dict["stack_info"] = self.formatStack(record.stack_info) log_record: Dict[str, Any] = OrderedDict() self.add_fields(log_record, record, message_dict) diff --git a/tox.ini b/tox.ini index 5b2d751..8eafd27 100644 --- a/tox.ini +++ b/tox.ini @@ -31,4 +31,4 @@ description = run type checks deps = mypy>=1.0 commands = - mypy src \ No newline at end of file + mypy src From 3dd02573824bb78807c8f6196b4e25728cf5c5af Mon Sep 17 00:00:00 2001 From: Nicholas Hairs Date: Fri, 8 Mar 2024 16:00:30 +1100 Subject: [PATCH 02/12] Update README for fork / PEP 541 request --- README.md | 27 +++++++++------------------ 1 file changed, 9 insertions(+), 18 deletions(-) diff --git a/README.md b/README.md index 2efa41c..eaa78e9 100644 --- a/README.md +++ b/README.md @@ -2,27 +2,16 @@ [![License](https://img.shields.io/pypi/l/python-json-logger.svg)](https://pypi.python.org/pypi/python-json-logger/) [![Version](https://img.shields.io/pypi/v/python-json-logger.svg)](https://pypi.python.org/pypi/python-json-logger/) +**Important:** This repository is a maintained fork of [madzak/python-json-logger](https://github.com/madzak/python-json-logger) pending [a PEP 541 request](https://github.com/pypi/support/issues/3607) for the PyPI package. The future direction of the project is being discussed [here](https://github.com/nhairs/python-json-logger/issues/1). + Overview ======= This library is provided to allow standard python logging to output log data as json objects. With JSON we can make our logs more readable by machines and we can stop writing custom parsers for syslog type records. -News -======= -Hi, I see this package is quiet alive and I am sorry for ignoring it so long. I will be stepping up my maintenance of this package so please allow me a week to get things back in order (and most likely a new minor version) and I'll post and update here once I am caught up. - Installing ========== -Pip: - - pip install python-json-logger -Pypi: - - https://pypi.python.org/pypi/python-json-logger - -Manual: - - python setup.py install +Until the PEP 541 request is complete you will need to find your own means of installing the package (e.g. building and storing in a private package repository). Usage ===== @@ -166,9 +155,11 @@ Sample JSON with a full formatter (basically the log message from the unit test) } ``` -External Examples -================= +Author and Maintainers +====================== + +This project was originally authored by [Zakaria Zajac](https://github.com/madzak). 
-- [Wesley Tanaka - Structured log files in Python using python-json-logger](http://web.archive.org/web/20201130054012/https://wtanaka.com/node/8201) +It is currently maintained by: -- [Archive](https://web.archive.org/web/20201130054012/https://wtanaka.com/node/8201) +- [Nicholas Hairs](https://github.com/nhairs) - [nicholashairs.com](https://www.nicholashairs.com) From 19bfb6418699e42e000b57675eebfe202d39ae71 Mon Sep 17 00:00:00 2001 From: Nicholas Hairs Date: Tue, 12 Mar 2024 00:41:17 +1100 Subject: [PATCH 03/12] Update Supported Python Versions (#2) This updates the supported versions of python including those run in the CI test suite. - Python versions 3.7-3.13 - Drops 3.6 - PYPY 3.7-3.10 - Adds MacOS and Windows to test matrix - Add `taskName` to `RESERVED_ATTRS` (fixes: #3 ) --- .github/workflows/build.yml | 43 --------------------------- .github/workflows/release.yml | 32 -------------------- .github/workflows/test-suite.yml | 47 ++++++++++++++++++++++++++++++ setup.py | 7 +++-- src/pythonjsonlogger/jsonlogger.py | 26 ++++++++++------- tox.ini | 8 +++-- 6 files changed, 71 insertions(+), 92 deletions(-) delete mode 100644 .github/workflows/build.yml delete mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/test-suite.yml diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index 9ea5d9d..0000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Test python-json-logger - -on: - push: - branches: - - master - workflow_dispatch: - inputs: - logLevel: - description: 'Log level' - required: true - default: 'warning' - type: choice - options: - - info - - warning - - debug - pull_request: - types: [opened, reopened] - -jobs: - test: - runs-on: "ubuntu-20.04" #Moving down to 20.04 (latest is 22.04) because of python3.6 support - strategy: - fail-fast: false - matrix: - python-version: ["pypy-3.8", "pypy-3.9", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"] - - steps: - - uses: actions/checkout@v3 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install tox tox-gh-actions - - - name: Test with tox - run: tox diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index ee269e8..0000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Release python-json-logger build - -on: - release: - types: [ created ] - - workflow_dispatch: - -jobs: - publish: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - - name: Set up Python 3.11 - uses: actions/setup-python@v4 - with: - python-version: 3.11 - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install setuptools wheel twine typing_extensions - - - name: Build and Upload to PyPi - run: | - python setup.py sdist bdist_wheel - python -m twine upload dist/* - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml new file mode 100644 index 0000000..91a22d4 --- /dev/null +++ b/.github/workflows/test-suite.yml @@ -0,0 +1,47 @@ +name: Test python-json-logger + +on: + push: + branches: + - main + + pull_request: + branches: + - main + +jobs: + test: + name: "Python ${{matrix.python-version}} ${{ matrix.os }}" + runs-on: "${{ matrix.os }}" + strategy: + matrix: + 
python-version: + - "pypy-3.7" + - "pypy-3.8" + - "pypy-3.9" + - "pypy-3.10" + - "3.7" + - "3.8" + - "3.9" + - "3.10" + - "3.11" + - "3.12" + os: + - ubuntu-latest + - windows-latest + - macos-latest + + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install tox tox-gh-actions + + - name: Test with tox + run: tox diff --git a/setup.py b/setup.py index 74320b3..a5a84de 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ setup( name="python-json-logger", - version="2.0.7", + version="3.0.0.dev1", url="http://github.com/madzak/python-json-logger", license="BSD", include_package_data=True, @@ -21,7 +21,7 @@ package_data={"pythonjsonlogger": ["py.typed"]}, packages=find_packages("src", exclude="tests"), # https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires - python_requires=">=3.6", + python_requires=">=3.7", test_suite="tests.tests", classifiers=[ 'Development Status :: 6 - Mature', @@ -30,12 +30,13 @@ 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', 'Topic :: System :: Logging', ] ) diff --git a/src/pythonjsonlogger/jsonlogger.py b/src/pythonjsonlogger/jsonlogger.py index 519a64d..2b72f54 100644 --- a/src/pythonjsonlogger/jsonlogger.py +++ b/src/pythonjsonlogger/jsonlogger.py @@ -2,12 +2,14 @@ This library is provided to allow standard python logging to output log data as JSON formatted strings """ + import logging import json import re import traceback import importlib from datetime import date, datetime, time, timezone +import sys from typing import Any, Callable, Dict, List, Optional, Tuple, Union from inspect import istraceback @@ -16,7 +18,8 @@ # skip natural LogRecord attributes # http://docs.python.org/library/logging.html#logrecord-attributes -RESERVED_ATTRS: Tuple[str, ...] = ( +# Changed in 3.0.0, is now list[str] instead of tuple[str, ...] 
+RESERVED_ATTRS: List[str] = [ "args", "asctime", "created", @@ -39,7 +42,11 @@ "stack_info", "thread", "threadName", -) +] + +if sys.version_info >= (3, 12): + # taskName added in python 3.12 + RESERVED_ATTRS.append("taskName") OptionalCallableOrStr = Optional[Union[Callable, str]] @@ -63,9 +70,7 @@ def merge_record_extra( rename_fields = {} for key, value in record.__dict__.items(): # this allows to have numeric keys - if key not in reserved and not ( - hasattr(key, "startswith") and key.startswith("_") - ): + if key not in reserved and not (hasattr(key, "startswith") and key.startswith("_")): target[rename_fields.get(key, key)] = value return target @@ -79,10 +84,10 @@ def default(self, obj): if isinstance(obj, (date, datetime, time)): return self.format_datetime_obj(obj) - elif istraceback(obj): + if istraceback(obj): return "".join(traceback.format_tb(obj)).strip() - elif type(obj) == Exception or isinstance(obj, Exception) or type(obj) == type: + if type(obj) == Exception or isinstance(obj, Exception) or type(obj) == type: return str(obj) try: @@ -117,9 +122,9 @@ def __init__( prefix: str = "", rename_fields: Optional[dict] = None, static_fields: Optional[dict] = None, - reserved_attrs: Tuple[str, ...] = RESERVED_ATTRS, + reserved_attrs: Union[Tuple[str, ...], List[str]] = RESERVED_ATTRS, timestamp: Union[bool, str] = False, - **kwargs: Any + **kwargs: Any, ): """ :param json_default: a function for encoding non-standard objects @@ -197,8 +202,7 @@ def parse(self) -> List[str]: if self._fmt: return formatter_style_pattern.findall(self._fmt) - else: - return [] + return [] def add_fields( self, diff --git a/tox.ini b/tox.ini index 8eafd27..b611567 100644 --- a/tox.ini +++ b/tox.ini @@ -1,17 +1,19 @@ [tox] requires = tox>=3 -envlist = lint, type, pypy{38,39}, py{36,37,38,39,310,311} +envlist = lint, type, pypy{37,38,39,310}, py{37,38,39,310,311,312} [gh-actions] python = + pypy-3.7: pypy37 pypy-3.8: pypy38 pypy-3.9: pypy39 - 3.6: py36 + pypy-3.10: pypy310 3.7: py37 3.8: py38 3.9: py39 3.10: py310 - 3.11: py311, type + 3.11: py311 + 3.12: py312, type [testenv] description = run unit tests From 6523af19f479fd30aca63f313c057aee06f56f42 Mon Sep 17 00:00:00 2001 From: Nicholas Hairs Date: Tue, 12 Mar 2024 01:18:31 +1100 Subject: [PATCH 04/12] Add SECURITY.md --- SECURITY.md | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..d44d8f0 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,10 @@ +# Security Policy + +## Supported Versions + +**TLDR**: Security support is provided for Python versions `>=3.7`. + + +## Reporting a Vulnerability + +Please report vulnerabilties using GitHub [here](https://github.com/nhairs/python-json-logger/security/advisories/new). 
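Taken together, the typed constructor from PATCH 01/12 and the reserved-attribute changes in PATCH 03/12 (including `taskName` on Python 3.12+) can be exercised as in the sketch below. It is a minimal usage example, not part of the patch series: it only uses keyword arguments that appear in the diffs above (`rename_fields`, `static_fields`, `timestamp`, `json_ensure_ascii`), and the concrete field names and values are illustrative assumptions.

```python
import logging
from pythonjsonlogger import jsonlogger

logger = logging.getLogger("example")
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()

# Keyword arguments below match the signature introduced in PATCH 01/12;
# the field names and values are illustrative only.
formatter = jsonlogger.JsonFormatter(
    "%(levelname)s %(message)s",
    rename_fields={"levelname": "level"},      # rename a standard attribute in the output
    static_fields={"service": "example-app"},  # added to every record (hypothetical value)
    timestamp=True,                            # adds a UTC "timestamp" key
    json_ensure_ascii=False,                   # passed through to json.dumps
)
handler.setFormatter(formatter)
logger.addHandler(handler)

# Extra attributes that are not in RESERVED_ATTRS (which now includes
# "taskName" on Python 3.12+) are merged into the JSON output.
logger.info("user logged in", extra={"user_id": 123})
```

Any remaining positional and keyword arguments, such as the format string above, are still forwarded to `logging.Formatter`, exactly as in the previous untyped constructor.
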
From cf77748f47fc401d5458f56b54ce9ae28b8a5a53 Mon Sep 17 00:00:00 2001 From: Nicholas Hairs Date: Tue, 12 Mar 2024 01:57:05 +1100 Subject: [PATCH 05/12] Set JsonFormatter.__init__ return type to None (#5) Fixes #4 --- src/pythonjsonlogger/jsonlogger.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pythonjsonlogger/jsonlogger.py b/src/pythonjsonlogger/jsonlogger.py index 2b72f54..0747803 100644 --- a/src/pythonjsonlogger/jsonlogger.py +++ b/src/pythonjsonlogger/jsonlogger.py @@ -125,7 +125,7 @@ def __init__( reserved_attrs: Union[Tuple[str, ...], List[str]] = RESERVED_ATTRS, timestamp: Union[bool, str] = False, **kwargs: Any, - ): + ) -> None: """ :param json_default: a function for encoding non-standard objects as outlined in https://docs.python.org/3/library/json.html From e72324e57d8f88bc81440493ec2caa10fa00bc9c Mon Sep 17 00:00:00 2001 From: Nicholas Hairs Date: Sun, 24 Mar 2024 17:48:13 +1100 Subject: [PATCH 06/12] Modernise testing, linting etc - Move to `pyproject.toml` - Move to pylint - Add validate-pyproject, black, pylint, mypy to lint command - add pylintrc - add mypy.ini - Apply pyupgrade 37+ - Update LICENSE copyright holders - fix lint errors --- LICENSE | 2 +- mypy.ini | 4 + pylintrc | 515 +++++++++++++++++++++++++++++ pyproject.toml | 74 +++++ setup.py | 42 --- src/pythonjsonlogger/jsonlogger.py | 52 +-- tests/test_jsonlogger.py | 160 +++++---- tox.ini | 46 +-- 8 files changed, 727 insertions(+), 168 deletions(-) create mode 100644 mypy.ini create mode 100644 pylintrc create mode 100644 pyproject.toml delete mode 100644 setup.py diff --git a/LICENSE b/LICENSE index f20f25d..90eaf67 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2011, Zakaria Zajac +Copyright (c) 2011, Zakaria Zajac and the python-json-logger Contributors All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..0c820cf --- /dev/null +++ b/mypy.ini @@ -0,0 +1,4 @@ +[mypy] + +[mypy-orjson.*] +ignore_missing_imports = True diff --git a/pylintrc b/pylintrc new file mode 100644 index 0000000..c2f821e --- /dev/null +++ b/pylintrc @@ -0,0 +1,515 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs=0 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. 
+#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + ## General Changes + # Explicit is better than implicit so allow bare returns + useless-return, + # pylint and black sometimes disagree - we always prefer black in these + # cases. Disable rules that can cause conflicts + line-too-long, + # Module docstrings are not required + missing-module-docstring + ## Project Disables + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'error', 'warning', 'refactor', and 'convention' +# which contain the number of messages in each category, as well as 'statement' +# which is the total number of statements analyzed. This score is used by the +# global evaluation report (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit + + +[LOGGING] + +# Format style used to check logging format string. `old` means using % +# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings. 
+logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. +#class-attribute-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _, + e, + r, + id, + f, + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. 
+#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. +#variable-rgx= + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[STRING] + +# This flag controls whether the implicit-str-concat-in-sequence should +# generate a warning on implicit string concatenation in sequences defined over +# several lines. +check-str-concat-over-line-jumps=no + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. 
+ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format=LF # Force UNIX style new lines + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled). +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled). +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. 
+valid-metaclass-classmethod-first-arg=cls + + +[DESIGN] + +# Maximum number of arguments for function / method. +max-args=10 + +# Maximum number of attributes for a class (see R0902). +max-attributes=15 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=10 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=1 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "BaseException, Exception". +overgeneral-exceptions=builtins.BaseException, + builtins.Exception diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..83cdc47 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,74 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "python-json-logger" +version = "3.0.0.dev1" +description = "JSON Log Formatter for the Python Logging Package" +authors = [ + {name = "Zakaria Zajac", email = "zak@madzak.com"}, +] +maintainers = [ + {name = "Nicholas Hairs", email = "info+python-json-logger@nicholashairs.com"}, +] + +# Dependency Information +requires-python = ">=3.7" +# dependencies = [] + +# Extra information +readme = "README.md" +license = {text = "BSD-2-Clause License"} +classifiers = [ + "Development Status :: 6 - Mature", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: System :: Logging", + "Typing :: Typed", +] + +[project.urls] +# homepage = "https://nhairs.github.io/python-json-logger/latest/" +GitHub = "https://github.com/nhairs/python-json-logger" + +[project.optional-dependencies] +dev = [ + ## Formatting / Linting + "validate-pyproject[all]", + "black", + "pylint", + "mypy", + ## Testing + "pytest", +] + +#docs = [ +# "black", +# "mkdocs", +# "mkdocs-material>=8.5", +# "mkdocs-awesome-pages-plugin", +# "mdx_truly_sane_lists", +# "mkdocstrings[python]", +# "mkdocs-gen-files", +# "mkdocs-literate-nav", +# "mike", +#] + +[tool.setuptools.packages.find] +where = ["src"] +include = ["pythonjsonlogger*"] + +[tool.setuptools.package-data] +pythonjsonlogger = ["py.typed"] + +[tool.black] +line-length = 100 diff --git a/setup.py b/setup.py deleted file mode 100644 index a5a84de..0000000 --- a/setup.py +++ /dev/null @@ -1,42 +0,0 @@ -from os import path -from setuptools import setup, find_packages - -# read the contents of your README file -this_directory = path.abspath(path.dirname(__file__)) -with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -setup( - name="python-json-logger", - version="3.0.0.dev1", - url="http://github.com/madzak/python-json-logger", - license="BSD", - include_package_data=True, - 
description="A python library adding a json log formatter", - long_description=long_description, - long_description_content_type='text/markdown', - author="Zakaria Zajac", - author_email="zak@madzak.com", - package_dir={'': 'src'}, - package_data={"pythonjsonlogger": ["py.typed"]}, - packages=find_packages("src", exclude="tests"), - # https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires - python_requires=">=3.7", - test_suite="tests.tests", - classifiers=[ - 'Development Status :: 6 - Mature', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3.12', - 'Topic :: System :: Logging', - ] -) diff --git a/src/pythonjsonlogger/jsonlogger.py b/src/pythonjsonlogger/jsonlogger.py index 0747803..f2cc59a 100644 --- a/src/pythonjsonlogger/jsonlogger.py +++ b/src/pythonjsonlogger/jsonlogger.py @@ -80,28 +80,34 @@ class JsonEncoder(json.JSONEncoder): A custom encoder extending the default JSONEncoder """ - def default(self, obj): - if isinstance(obj, (date, datetime, time)): - return self.format_datetime_obj(obj) + def default(self, o: Any) -> Any: + if isinstance(o, (date, datetime, time)): + return self.format_datetime_obj(o) - if istraceback(obj): - return "".join(traceback.format_tb(obj)).strip() + if istraceback(o): + return "".join(traceback.format_tb(o)).strip() - if type(obj) == Exception or isinstance(obj, Exception) or type(obj) == type: - return str(obj) + # pylint: disable=unidiomatic-typecheck + if type(o) == Exception or isinstance(o, Exception) or type(o) == type: + return str(o) try: - return super(JsonEncoder, self).default(obj) + return super().default(o) except TypeError: try: - return str(obj) + return str(o) - except Exception: + except Exception: # pylint: disable=broad-exception-caught return None - def format_datetime_obj(self, obj): - return obj.isoformat() + def format_datetime_obj(self, o): + """Format datetime objects found in self.default + + This allows subclasses to change the datetime format without understanding the + internals of the default method. 
+ """ + return o.isoformat() class JsonFormatter(logging.Formatter): @@ -111,6 +117,7 @@ class JsonFormatter(logging.Formatter): json default encoder """ + # pylint: disable=too-many-arguments def __init__( self, *args: Any, @@ -122,7 +129,7 @@ def __init__( prefix: str = "", rename_fields: Optional[dict] = None, static_fields: Optional[dict] = None, - reserved_attrs: Union[Tuple[str, ...], List[str]] = RESERVED_ATTRS, + reserved_attrs: Union[Tuple[str, ...], List[str], None] = None, timestamp: Union[bool, str] = False, **kwargs: Any, ) -> None: @@ -155,6 +162,8 @@ def __init__( self.prefix = prefix self.rename_fields = rename_fields or {} self.static_fields = static_fields or {} + if reserved_attrs is None: + reserved_attrs = RESERVED_ATTRS self.reserved_attrs = dict(zip(reserved_attrs, reserved_attrs)) self.timestamp = timestamp @@ -166,6 +175,7 @@ def __init__( self._required_fields = self.parse() self._skip_fields = dict(zip(self._required_fields, self._required_fields)) self._skip_fields.update(self.reserved_attrs) + return def _str_to_fn(self, fn_as_str): """ @@ -198,7 +208,7 @@ def parse(self) -> List[str]: elif isinstance(self._style, logging.PercentStyle): formatter_style_pattern = re.compile(r"%\((.+?)\)", re.IGNORECASE) else: - raise ValueError("Invalid format: %s" % self._fmt) + raise ValueError(f"Invalid format: {self._fmt!r}") if self._fmt: return formatter_style_pattern.findall(self._fmt) @@ -226,24 +236,28 @@ def add_fields( ) if self.timestamp: + # TODO: Can this use isinstance instead? + # pylint: disable=unidiomatic-typecheck key = self.timestamp if type(self.timestamp) == str else "timestamp" log_record[key] = datetime.fromtimestamp(record.created, tz=timezone.utc) self._perform_rename_log_fields(log_record) + return - def _perform_rename_log_fields(self, log_record): + def _perform_rename_log_fields(self, log_record: Dict[str, Any]) -> None: for old_field_name, new_field_name in self.rename_fields.items(): log_record[new_field_name] = log_record[old_field_name] del log_record[old_field_name] + return - def process_log_record(self, log_record): + def process_log_record(self, log_record: Dict[str, Any]) -> Dict[str, Any]: """ Override this method to implement custom logic on the possibly ordered dictionary. """ return log_record - def jsonify_log_record(self, log_record): + def jsonify_log_record(self, log_record: Dict[str, Any]) -> str: """Returns a json string of the log record.""" return self.json_serializer( log_record, @@ -255,12 +269,12 @@ def jsonify_log_record(self, log_record): def serialize_log_record(self, log_record: Dict[str, Any]) -> str: """Returns the final representation of the log record.""" - return "%s%s" % (self.prefix, self.jsonify_log_record(log_record)) + return self.prefix + self.jsonify_log_record(log_record) def format(self, record: logging.LogRecord) -> str: """Formats a log record and serializes to json""" message_dict: Dict[str, Any] = {} - # FIXME: logging.LogRecord.msg and logging.LogRecord.message in typeshed + # TODO: logging.LogRecord.msg and logging.LogRecord.message in typeshed # are always type of str. We shouldn't need to override that. 
if isinstance(record.msg, dict): message_dict = record.msg diff --git a/tests/test_jsonlogger.py b/tests/test_jsonlogger.py index af369d2..abd04ba 100644 --- a/tests/test_jsonlogger.py +++ b/tests/test_jsonlogger.py @@ -1,27 +1,20 @@ -# -*- coding: utf-8 -*- -import unittest -import unittest.mock +import datetime import logging +from io import StringIO import json +import random import sys import traceback -import random - -try: - import xmlrunner # noqa -except ImportError: - pass - -from io import StringIO +import unittest +import unittest.mock -sys.path.append('src/python-json-logger') +sys.path.append("src/python-json-logger") from pythonjsonlogger import jsonlogger -import datetime class TestJsonLogger(unittest.TestCase): def setUp(self): - self.log = logging.getLogger("logging-test-{}".format(random.randint(1, 101))) + self.log = logging.getLogger(f"logging-test-{random.randint(1, 101)}") self.log.setLevel(logging.DEBUG) self.buffer = StringIO() @@ -41,7 +34,7 @@ def test_default_format(self): def test_percentage_format(self): fr = jsonlogger.JsonFormatter( # All kind of different styles to check the regex - '[%(levelname)8s] %(message)s %(filename)s:%(lineno)d %(asctime)' + "[%(levelname)8s] %(message)s %(filename)s:%(lineno)d %(asctime)" ) self.log_handler.setFormatter(fr) @@ -50,10 +43,10 @@ def test_percentage_format(self): log_json = json.loads(self.buffer.getvalue()) self.assertEqual(log_json["message"], msg) - self.assertEqual(log_json.keys(), {'levelname', 'message', 'filename', 'lineno', 'asctime'}) + self.assertEqual(log_json.keys(), {"levelname", "message", "filename", "lineno", "asctime"}) def test_rename_base_field(self): - fr = jsonlogger.JsonFormatter(rename_fields={'message': '@message'}) + fr = jsonlogger.JsonFormatter(rename_fields={"message": "@message"}) self.log_handler.setFormatter(fr) msg = "testing logging format" @@ -63,7 +56,7 @@ def test_rename_base_field(self): self.assertEqual(log_json["@message"], msg) def test_rename_nonexistent_field(self): - fr = jsonlogger.JsonFormatter(rename_fields={'nonexistent_key': 'new_name'}) + fr = jsonlogger.JsonFormatter(rename_fields={"nonexistent_key": "new_name"}) self.log_handler.setFormatter(fr) stderr_watcher = StringIO() @@ -73,7 +66,7 @@ def test_rename_nonexistent_field(self): self.assertTrue("KeyError: 'nonexistent_key'" in stderr_watcher.getvalue()) def test_add_static_fields(self): - fr = jsonlogger.JsonFormatter(static_fields={'log_stream': 'kafka'}) + fr = jsonlogger.JsonFormatter(static_fields={"log_stream": "kafka"}) self.log_handler.setFormatter(fr) @@ -86,27 +79,27 @@ def test_add_static_fields(self): def test_format_keys(self): supported_keys = [ - 'asctime', - 'created', - 'filename', - 'funcName', - 'levelname', - 'levelno', - 'lineno', - 'module', - 'msecs', - 'message', - 'name', - 'pathname', - 'process', - 'processName', - 'relativeCreated', - 'thread', - 'threadName' + "asctime", + "created", + "filename", + "funcName", + "levelname", + "levelno", + "lineno", + "module", + "msecs", + "message", + "name", + "pathname", + "process", + "processName", + "relativeCreated", + "thread", + "threadName", ] - log_format = lambda x: ['%({0:s})s'.format(i) for i in x] - custom_format = ' '.join(log_format(supported_keys)) + log_format = lambda x: [f"%({i:s})s" for i in x] + custom_format = " ".join(log_format(supported_keys)) fr = jsonlogger.JsonFormatter(custom_format) self.log_handler.setFormatter(fr) @@ -121,7 +114,7 @@ def test_format_keys(self): self.assertTrue(True) def test_unknown_format_key(self): - 
fr = jsonlogger.JsonFormatter('%(unknown_key)s %(message)s')
+        fr = jsonlogger.JsonFormatter("%(unknown_key)s %(message)s")
         self.log_handler.setFormatter(fr)
         msg = "testing unknown logging format"
@@ -134,8 +127,7 @@ def test_log_adict(self):
         fr = jsonlogger.JsonFormatter()
         self.log_handler.setFormatter(fr)
-        msg = {"text": "testing logging", "num": 1, 5: "9",
-               "nested": {"more": "data"}}
+        msg = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}
         self.log.info(msg)
         log_json = json.loads(self.buffer.getvalue())
@@ -149,8 +141,7 @@ def test_log_extra(self):
         fr = jsonlogger.JsonFormatter()
         self.log_handler.setFormatter(fr)
-        extra = {"text": "testing logging", "num": 1, 5: "9",
-                 "nested": {"more": "data"}}
+        extra = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}
         self.log.info("hello", extra=extra)
         log_json = json.loads(self.buffer.getvalue())
         self.assertEqual(log_json.get("text"), extra["text"])
@@ -163,19 +154,20 @@ def test_json_default_encoder(self):
         fr = jsonlogger.JsonFormatter()
         self.log_handler.setFormatter(fr)
-        msg = {"adate": datetime.datetime(1999, 12, 31, 23, 59),
-               "otherdate": datetime.date(1789, 7, 14),
-               "otherdatetime": datetime.datetime(1789, 7, 14, 23, 59),
-               "otherdatetimeagain": datetime.datetime(1900, 1, 1)}
+        msg = {
+            "adate": datetime.datetime(1999, 12, 31, 23, 59),
+            "otherdate": datetime.date(1789, 7, 14),
+            "otherdatetime": datetime.datetime(1789, 7, 14, 23, 59),
+            "otherdatetimeagain": datetime.datetime(1900, 1, 1),
+        }
         self.log.info(msg)
         log_json = json.loads(self.buffer.getvalue())
         self.assertEqual(log_json.get("adate"), "1999-12-31T23:59:00")
         self.assertEqual(log_json.get("otherdate"), "1789-07-14")
         self.assertEqual(log_json.get("otherdatetime"), "1789-07-14T23:59:00")
-        self.assertEqual(log_json.get("otherdatetimeagain"),
-                         "1900-01-01T00:00:00")
+        self.assertEqual(log_json.get("otherdatetimeagain"), "1900-01-01T00:00:00")

-    @unittest.mock.patch('time.time', return_value=1500000000.0)
+    @unittest.mock.patch("time.time", return_value=1500000000.0)
     def test_json_default_encoder_with_timestamp(self, time_mock):
         fr = jsonlogger.JsonFormatter(timestamp=True)
         self.log_handler.setFormatter(fr)
@@ -189,11 +181,11 @@ def test_json_default_encoder_with_timestamp(self, time_mock):
     def test_json_custom_default(self):
         def custom(o):
             return "very custom"
+
         fr = jsonlogger.JsonFormatter(json_default=custom)
         self.log_handler.setFormatter(fr)
-        msg = {"adate": datetime.datetime(1999, 12, 31, 23, 59),
-               "normal": "value"}
+        msg = {"adate": datetime.datetime(1999, 12, 31, 23, 59), "normal": "value"}
         self.log.info(msg)
         log_json = json.loads(self.buffer.getvalue())
         self.assertEqual(log_json.get("adate"), "very custom")
@@ -215,12 +207,12 @@ def process_log_record(self, log_record):
     def get_traceback_from_exception_followed_by_log_call(self) -> str:
         try:
-            raise Exception('test')
+            raise Exception("test")
         except Exception:
             self.log.exception("hello")
             str_traceback = traceback.format_exc()
             # Formatter removes trailing new line
-            if str_traceback.endswith('\n'):
+            if str_traceback.endswith("\n"):
                 str_traceback = str_traceback[:-1]
         return str_traceback
@@ -245,14 +237,14 @@ def test_exc_info_renamed(self):
     def test_ensure_ascii_true(self):
         fr = jsonlogger.JsonFormatter()
         self.log_handler.setFormatter(fr)
-        self.log.info('Привет')
+        self.log.info("Привет")
         msg = self.buffer.getvalue().split('"message": "', 1)[1].split('"', 1)[0]
         self.assertEqual(msg, r"\u041f\u0440\u0438\u0432\u0435\u0442")

     def test_ensure_ascii_false(self):
         fr = jsonlogger.JsonFormatter(json_ensure_ascii=False)
         self.log_handler.setFormatter(fr)
-        self.log.info('Привет')
+        self.log.info("Привет")
         msg = self.buffer.getvalue().split('"message": "', 1)[1].split('"', 1)[0]
         self.assertEqual(msg, "Привет")
@@ -262,10 +254,11 @@ def encode_complex(z):
             return (z.real, z.imag)
         else:
             type_name = z.__class__.__name__
-            raise TypeError("Object of type '{}' is no JSON serializable".format(type_name))
+            raise TypeError(f"Object of type '{type_name}' is no JSON serializable")

-        formatter = jsonlogger.JsonFormatter(json_default=encode_complex,
-                                             json_encoder=json.JSONEncoder)
+        formatter = jsonlogger.JsonFormatter(
+            json_default=encode_complex, json_encoder=json.JSONEncoder
+        )
         self.log_handler.setFormatter(formatter)

         value = {
@@ -274,44 +267,43 @@ def encode_complex(z):
         self.log.info(" message", extra=value)
         msg = self.buffer.getvalue()
-        self.assertEqual(msg, "{\"message\": \" message\", \"special\": [3.0, 8.0]}\n")
+        self.assertEqual(msg, '{"message": " message", "special": [3.0, 8.0]}\n')

     def test_rename_reserved_attrs(self):
-        log_format = lambda x: ['%({0:s})s'.format(i) for i in x]
+        log_format = lambda x: [f"%({i:s})s" for i in x]
         reserved_attrs_map = {
-            'exc_info': 'error.type',
-            'exc_text': 'error.message',
-            'funcName': 'log.origin.function',
-            'levelname': 'log.level',
-            'module': 'log.origin.file.name',
-            'processName': 'process.name',
-            'threadName': 'process.thread.name',
-            'msg': 'log.message'
+            "exc_info": "error.type",
+            "exc_text": "error.message",
+            "funcName": "log.origin.function",
+            "levelname": "log.level",
+            "module": "log.origin.file.name",
+            "processName": "process.name",
+            "threadName": "process.thread.name",
+            "msg": "log.message",
         }
-        custom_format = ' '.join(log_format(reserved_attrs_map.keys()))
-        reserved_attrs = [_ for _ in jsonlogger.RESERVED_ATTRS if _ not in list(reserved_attrs_map.keys())]
-        formatter = jsonlogger.JsonFormatter(custom_format, reserved_attrs=reserved_attrs, rename_fields=reserved_attrs_map)
+        custom_format = " ".join(log_format(reserved_attrs_map.keys()))
+        reserved_attrs = [
+            _ for _ in jsonlogger.RESERVED_ATTRS if _ not in list(reserved_attrs_map.keys())
+        ]
+        formatter = jsonlogger.JsonFormatter(
+            custom_format, reserved_attrs=reserved_attrs, rename_fields=reserved_attrs_map
+        )
         self.log_handler.setFormatter(formatter)
         self.log.info("message")
         msg = self.buffer.getvalue()
-        self.assertEqual(msg, '{"error.type": null, "error.message": null, "log.origin.function": "test_rename_reserved_attrs", "log.level": "INFO", "log.origin.file.name": "test_jsonlogger", "process.name": "MainProcess", "process.thread.name": "MainThread", "log.message": "message"}\n')
+        self.assertEqual(
+            msg,
+            '{"error.type": null, "error.message": null, "log.origin.function": "test_rename_reserved_attrs", "log.level": "INFO", "log.origin.file.name": "test_jsonlogger", "process.name": "MainProcess", "process.thread.name": "MainThread", "log.message": "message"}\n',
+        )

     def test_merge_record_extra(self):
-        record = logging.LogRecord("name", level=1, pathname="", lineno=1, msg="Some message", args=None, exc_info=None)
+        record = logging.LogRecord(
+            "name", level=1, pathname="", lineno=1, msg="Some message", args=None, exc_info=None
+        )
         output = jsonlogger.merge_record_extra(record, target=dict(foo="bar"), reserved=[])
         self.assertIn("foo", output)
         self.assertIn("msg", output)
         self.assertEqual(output["foo"], "bar")
         self.assertEqual(output["msg"], "Some message")
-
-
-if __name__ == '__main__':
-    if len(sys.argv[1:]) > 0:
-        if sys.argv[1] == 'xml':
-            testSuite = unittest.TestLoader().loadTestsFromTestCase(
-                TestJsonLogger)
-            xmlrunner.XMLTestRunner(output='reports').run(testSuite)
-    else:
-        unittest.main()

diff --git a/tox.ini b/tox.ini
index b611567..e0da8dc 100644
--- a/tox.ini
+++ b/tox.ini
@@ -4,33 +4,35 @@ envlist = lint, type, pypy{37,38,39,310}, py{37,38,39,310,311,312}
 [gh-actions]
 python =
-    pypy-3.7: pypy37
-    pypy-3.8: pypy38
-    pypy-3.9: pypy39
-    pypy-3.10: pypy310
-    3.7: py37
-    3.8: py38
-    3.9: py39
-    3.10: py310
-    3.11: py311
-    3.12: py312, type
+    pypy-3.7: pypy37
+    pypy-3.8: pypy38
+    pypy-3.9: pypy39
+    pypy-3.10: pypy310
+    3.7: py37
+    3.8: py38
+    3.9: py39
+    3.10: py310
+    3.11: py311
+    3.12: py312, lint

 [testenv]
 description = run unit tests
+extras = dev
 commands =
-    python -m unittest discover
+    pytest tests
+    # python -m unittest discover

-[testenv:lint]
-description = run linters
-skip_install = true
-deps =
-    black>=22.12
+[testenv:format]
+description = run formatters
+extras = dev
 commands =
-    black src
+    black src tests

-[testenv:type]
-description = run type checks
-deps =
-    mypy>=1.0
+[testenv:lint]
+description = run linters
+extras = dev
 commands =
-    mypy src
+    validate-pyproject pyproject.toml
+    black --check --diff src tests
+    pylint src
+    mypy src tests

From a907627509c63f1ac0522c569114bdce6aedc28c Mon Sep 17 00:00:00 2001
From: Nicholas Hairs
Date: Sun, 24 Mar 2024 18:06:52 +1100
Subject: [PATCH 07/12] Seperate dev deps into lint and test

This is to avoid issue with pypy37 where black needs the typed-ast
module which won't build.

We don't actually use black in the pypy37 test environment so lets
split things up
---
 pyproject.toml | 7 ++++---
 tox.ini | 6 +++---
 2 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 83cdc47..ff9a760 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -41,13 +41,14 @@ classifiers = [
 GitHub = "https://github.com/nhairs/python-json-logger"

 [project.optional-dependencies]
-dev = [
-    ## Formatting / Linting
+lint = [
     "validate-pyproject[all]",
     "black",
     "pylint",
     "mypy",
-    ## Testing
+]
+
+test = [
     "pytest",
 ]

diff --git a/tox.ini b/tox.ini
index e0da8dc..2827d68 100644
--- a/tox.ini
+++ b/tox.ini
@@ -17,20 +17,20 @@ python =
 [testenv]
 description = run unit tests
-extras = dev
+extras = test
 commands =
     pytest tests
     # python -m unittest discover

 [testenv:format]
 description = run formatters
-extras = dev
+extras = lint
 commands =
     black src tests

 [testenv:lint]
 description = run linters
-extras = dev
+extras = lint
 commands =
     validate-pyproject pyproject.toml
     black --check --diff src tests

From d4d0b3d11f78fe697ed686183300420b1cc82ba3 Mon Sep 17 00:00:00 2001
From: Nicholas Hairs
Date: Sun, 24 Mar 2024 18:34:20 +1100
Subject: [PATCH 08/12] Move running lint to GHA workflow

---
 .github/workflows/test-suite.yml | 4 ++++
 tox.ini | 2 +-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml
index 91a22d4..621f22f 100644
--- a/.github/workflows/test-suite.yml
+++ b/.github/workflows/test-suite.yml
@@ -45,3 +45,7 @@ jobs:
     - name: Test with tox
       run: tox
+
+    - name: Lint with tox
+      run: tox -e lint
+      if: "${{ matrix.python-version == '3.12' && matrix.os == 'ubuntu-latest' }}"

diff --git a/tox.ini b/tox.ini
index 2827d68..2dafe83 100644
--- a/tox.ini
+++ b/tox.ini
@@ -13,7 +13,7 @@ python =
     3.9: py39
     3.10: py310
     3.11: py311
-    3.12: py312, lint
+    3.12: py312

 [testenv]
 description = run unit tests

From 7c7b72ed86647fa732865ad6408d248f0c8afdcb Mon Sep 17 00:00:00 2001
From: Nicholas Hairs
Date: Sun, 24 Mar 2024 18:41:03 +1100
Subject: [PATCH 09/12] Split GHA lint and test jobs

---
 .github/workflows/test-suite.yml | 25 ++++++++++++++++++++-----
 1 file changed, 20 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml
index 621f22f..f4b3b41 100644
--- a/.github/workflows/test-suite.yml
+++ b/.github/workflows/test-suite.yml
@@ -10,8 +10,27 @@ on:
       - main

 jobs:
+  lint:
+    name: "Python Lint"
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.12"
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install tox
+
+      - name: Lint with tox
+        run: tox -e lint
+
   test:
-    name: "Python ${{matrix.python-version}} ${{ matrix.os }}"
+    name: "Python Test ${{matrix.python-version}} ${{ matrix.os }}"
+    needs: [lint]
     runs-on: "${{ matrix.os }}"
     strategy:
       matrix:
@@ -45,7 +64,3 @@ jobs:
     - name: Test with tox
       run: tox
-
-    - name: Lint with tox
-      run: tox -e lint
-      if: "${{ matrix.python-version == '3.12' && matrix.os == 'ubuntu-latest' }}"

From 7d2c48b3a17264cd057d17301f3c61dd82133813 Mon Sep 17 00:00:00 2001
From: Nicholas Hairs
Date: Sun, 24 Mar 2024 19:13:28 +1100
Subject: [PATCH 10/12] Update README

---
 README.md | 62 +++++++++++++++++++++++++++++++++++++++----------------
 tox.ini | 1 -
 2 files changed, 44 insertions(+), 19 deletions(-)

diff --git a/README.md b/README.md
index eaa78e9..bcf6302 100644
--- a/README.md
+++ b/README.md
@@ -1,22 +1,46 @@
-![Build Status](https://github.com/madzak/python-json-logger/actions/workflows/build.yml/badge.svg)
+![Build Status](https://github.com/nhairs/python-json-logger/actions/workflows/test-suite.yml/badge.svg)
 [![License](https://img.shields.io/pypi/l/python-json-logger.svg)](https://pypi.python.org/pypi/python-json-logger/)
 [![Version](https://img.shields.io/pypi/v/python-json-logger.svg)](https://pypi.python.org/pypi/python-json-logger/)

-**Important:** This repository is a maintained fork of [madzak/python-json-logger](https://github.com/madzak/python-json-logger) pending [a PEP 541 request](https://github.com/pypi/support/issues/3607) for the PyPI package. The future direction of the project is being discussed [here](https://github.com/nhairs/python-json-logger/issues/1).
+# Python JSON Logger

-Overview
-=======
 This library is provided to allow standard python logging to output log data as json objects. With JSON we can make our logs more readable by machines and we can stop writing custom parsers for syslog type records.

-Installing
-==========
-Until the PEP 541 request is complete you will need to find your own means of installing the package (e.g. building and storing in a private package repository).
+### 🚨 Important 🚨

-Usage
-=====
+This repository is a maintained fork of [madzak/python-json-logger](https://github.com/madzak/python-json-logger) pending [a PEP 541 request](https://github.com/pypi/support/issues/3607) for the PyPI package. The future direction of the project is being discussed [here](https://github.com/nhairs/python-json-logger/issues/1).

-## Integrating with Python's logging framework
+## Installation
+
+### Install via pip / PyPI
+
+Until the PEP 541 request is complete you will need to use one of the alternative methods below.
+
+### Install from GitHub
+
+```shell
+pip install 'python-json-logger@git+https://github.com/nhairs/python-json-logger.git'
+```
+
+To install a specific version:
+
+```shell
+pip install 'python-json-logger@git+https://github.com/nhairs/python-json-logger.git@v2.0.7'
+```
+
+
+### Install from Source
+
+```shell
+git clone https://github.com/nhairs/python-json-logger.git
+cd python-json-logger
+pip install -e .
+```
+
+## Usage
+
+### Integrating with Python's logging framework
 Json outputs are provided by the JsonFormatter logging formatter. You can add the custom formatter like below:
@@ -34,7 +58,7 @@ Json outputs are provided by the JsonFormatter logging formatter. You can add th
     logger.addHandler(logHandler)
 ```

-## Customizing fields
+### Customizing fields

 The fmt parser can also be overidden if you want to have required fields that differ from the default of just `message`.
@@ -76,7 +100,7 @@ formatter = CustomJsonFormatter('%(timestamp)s %(level)s %(name)s %(message)s')
 Items added to the log record will be included in *every* log message, no matter what the format requires.

-## Adding custom object serialization
+### Adding custom object serialization

 For custom handling of object serialization you can specify default json object translator or provide a custom encoder
@@ -93,7 +117,7 @@ logger.info({"special": "value", "run": 12})
 logger.info("classic message", extra={"special": "value", "run": 12})
 ```

-## Using a Config File
+### Using a Config File

 To use the module with a config file using the [`fileConfig` function](https://docs.python.org/3/library/logging.config.html#logging.config.fileConfig), use the class `pythonjsonlogger.jsonlogger.JsonFormatter`. Here is a sample config file.
@@ -126,8 +150,7 @@ format = %(message)s
 class = pythonjsonlogger.jsonlogger.JsonFormatter
 ```

-Example Output
-==============
+## Example Output

 Sample JSON with a full formatter (basically the log message from the unit test). Every log message will appear on 1 line like a typical logger.
@@ -155,10 +178,13 @@ Sample JSON with a full formatter (basically the log message from the unit test)
 }
 ```

-Author and Maintainers
-======================
+## License
+
+This project is licensed under the BSD 2 Clause License - see [`LICENSE`](https://github.com/nhairs/python-json-logger/blob/main/LICENSE)
+
+## Authors and Maintainers

-This project was originally authored by [Zakaria Zajac](https://github.com/madzak).
+
+This project was originally authored by [Zakaria Zajac](https://github.com/madzak) and our wonderful [contributors](https://github.com/nhairs/python-json-logger/graphs/contributors)

 It is currently maintained by:

diff --git a/tox.ini b/tox.ini
index 2dafe83..946be58 100644
--- a/tox.ini
+++ b/tox.ini
@@ -20,7 +20,6 @@ description = run unit tests
 extras = test
 commands =
     pytest tests
-    # python -m unittest discover

 [testenv:format]
 description = run formatters

From 296ff7a54e6ef2f92287fe5ac8688e293c8625b0 Mon Sep 17 00:00:00 2001
From: Nicholas Hairs
Date: Sun, 24 Mar 2024 19:56:50 +1100
Subject: [PATCH 11/12] Sort RESERVED_ATTRS if adding new values to it

---
 src/pythonjsonlogger/jsonlogger.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/pythonjsonlogger/jsonlogger.py b/src/pythonjsonlogger/jsonlogger.py
index f2cc59a..259dff7 100644
--- a/src/pythonjsonlogger/jsonlogger.py
+++ b/src/pythonjsonlogger/jsonlogger.py
@@ -47,6 +47,7 @@
 if sys.version_info >= (3, 12):
     # taskName added in python 3.12
     RESERVED_ATTRS.append("taskName")
+    RESERVED_ATTRS.sort()

 OptionalCallableOrStr = Optional[Union[Callable, str]]

From 7c5a3e6f7489d23eb0f016c5c12a2ff27c1dc192 Mon Sep 17 00:00:00 2001
From: Nicholas Hairs
Date: Sun, 24 Mar 2024 20:14:00 +1100
Subject: [PATCH 12/12] Release 3.0.0

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index ff9a760..f3705dc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@
 [project]
 name = "python-json-logger"
-version = "3.0.0.dev1"
+version = "3.0.0"
 description = "JSON Log Formatter for the Python Logging Package"
 authors = [
     {name = "Zakaria Zajac", email = "zak@madzak.com"},
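
The series above ends with the 3.0.0 release. As a quick smoke test of the behaviour the updated test suite exercises (field renaming via `rename_fields` and custom serialisation via `json_default`), a minimal sketch such as the following should work against this release. The logger name, the renamed key, and the expected output shown in the final comment are illustrative assumptions, not part of the patches.

```python
import logging

from pythonjsonlogger import jsonlogger


def encode_complex(z):
    """Fallback serializer for values json.dumps cannot handle natively."""
    if isinstance(z, complex):
        return (z.real, z.imag)
    raise TypeError(f"Object of type '{z.__class__.__name__}' is not JSON serializable")


logger = logging.getLogger("smoke-test")  # hypothetical logger name
handler = logging.StreamHandler()
handler.setFormatter(
    jsonlogger.JsonFormatter(
        "%(levelname)s %(message)s",
        rename_fields={"levelname": "log.level"},
        json_default=encode_complex,
    )
)
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Extra attributes are merged into the JSON record; the complex value is
# serialised by encode_complex because json.dumps cannot handle it directly.
logger.info("hello", extra={"special": 3 + 8j})
# Expected output (one JSON object per line), roughly:
# {"log.level": "INFO", "message": "hello", "special": [3.0, 8.0]}
```

This mirrors `test_rename_reserved_attrs` and the `encode_complex` test in the diffs above, just outside of the unittest harness.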