JSON logging for Celery workers.

This enables JSON logging for Celery workers when the LOG_JSON config value is set. It uses the same JsonFormatter class used by the Flask applications. That class has been updated in two ways:

- It takes a `source` kwarg to define the log source for the formatter.
- The `msg` attribute of the log record is formatted with any arguments that may have been passed. This is necessary for Celery to render task type, completion time, etc. into the log output.
parent e56e0d8619
commit 9ae20b4a2a
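A minimal sketch of the intended usage of the updated formatter (the handler wiring and the task name/timing below are illustrative, and it assumes the formatter's remaining default fields only read attributes present on any log record):

import logging

from atst.utils.logging import JsonFormatter

# The Celery worker attaches the formatter with an explicit source; the Flask
# apps keep the default source="atst".
handler = logging.StreamHandler()
handler.setFormatter(JsonFormatter(source="queue"))

logger = logging.getLogger("example")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Celery-style lazy %-formatting: the formatter renders the args into the
# "message" field (record.msg % record.args) rather than logging the template.
logger.info("Task %s succeeded in %ss", "tasks.send_notification", 0.42)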
@@ -1,5 +1,6 @@
 from celery import Celery

 celery = Celery(__name__)
@@ -32,14 +32,25 @@ class JsonFormatter(logging.Formatter):
         ("dod_edipi", lambda r: r.__dict__.get("dod_edipi")),
         ("severity", lambda r: r.levelname),
         ("tags", lambda r: r.__dict__.get("tags")),
-        ("message", lambda r: r.msg),
         ("audit_event", lambda r: r.__dict__.get("audit_event")),
     ]

-    def format(self, record):
-        message_dict = {"source": "atst"}
+    def __init__(self, *args, source="atst", **kwargs):
+        self.source = source
+        super().__init__(self)
+
+    def format(self, record, *args, **kwargs):
+        message_dict = {"source": self.source}

         for field, func in self._DEFAULT_RECORD_FIELDS:
-            message_dict[field] = func(record)
+            result = func(record)
+            if result:
+                message_dict[field] = result
+
+        if record.args:
+            message_dict["message"] = record.msg % record.args
+        else:
+            message_dict["message"] = record.msg
+
         if record.__dict__.get("exc_info") is not None:
             message_dict["details"] = {
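For context on the message change above: Celery's task logs pass their metadata as lazy %-style arguments, so record.msg holds only the template until it is rendered against record.args. A small stdlib-only illustration (the message text approximates the shape Celery produces; it is not Celery's exact template):

import logging

record = logging.LogRecord(
    name="celery.app.trace",
    level=logging.INFO,
    pathname="example.py",
    lineno=0,
    msg="Task %(name)s[%(id)s] succeeded in %(runtime)ss",
    args=({"name": "tasks.send_notification", "id": "abc123", "runtime": 0.42},),
    exc_info=None,
)

print(record.msg)                # the unrendered template
print(record.msg % record.args)  # Task tasks.send_notification[abc123] succeeded in 0.42s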
@@ -1,7 +1,19 @@
 #!/usr/bin/env python
+import logging

 from atst.app import celery, make_app, make_config
+from celery.signals import after_setup_task_logger
+
+from atst.utils.logging import JsonFormatter

 config = make_config()
 app = make_app(config)
 app.app_context().push()
+
+
+@after_setup_task_logger.connect
+def setup_task_logger(*args, **kwargs):
+    if app.config.get("LOG_JSON"):
+        logger = logging.getLogger()
+        for handler in logger.handlers:
+            handler.setFormatter(JsonFormatter(source="queue"))
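Celery fires after_setup_task_logger once per worker process after it has configured the task logger, so the hook above swaps the formatter on whatever handlers exist by then. A rough way to exercise it without starting a worker, assuming this module has already been imported with LOG_JSON enabled in the app config:

import logging

from celery.signals import after_setup_task_logger

from atst.utils.logging import JsonFormatter

root = logging.getLogger()
root.addHandler(logging.StreamHandler())

# Sending the signal by hand calls the connected setup_task_logger receiver,
# which replaces each handler's formatter with JsonFormatter(source="queue").
after_setup_task_logger.send(sender=None, logger=root)

assert all(isinstance(h.formatter, JsonFormatter) for h in root.handlers)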
@@ -69,10 +69,12 @@ def test_request_context_filter(logger, log_stream_content, request_ctx, monkeyp

     user = Mock(spec=["id"])
     user.id = user_uuid
+    user.dod_id = "5678901234"

     monkeypatch.setattr("atst.utils.logging.g", Mock(current_user=user))
     request_ctx.request.environ["HTTP_X_REQUEST_ID"] = request_uuid
     logger.info("this user is doing something")
     log = json.loads(log_stream_content())
     assert log["user_id"] == str(user_uuid)
+    assert log["dod_edipi"] == str(user.dod_id)
     assert log["request_id"] == request_uuid