Remove structlog (#957)
* Replace ctms.web logger with logging

* Untangle metrics from log context

* Replace structlog with logging

* Move log initialization to app.py

* Do not reconfigure logs using Alembic config

* Run ruff manually to sort imports

* @grahamalama review
leplatrem authored Aug 7, 2024
1 parent b3a19eb commit b42575d
Showing 13 changed files with 166 additions and 240 deletions.
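
The heart of the change is the logging call style: structlog's bound keyword arguments become the standard library's extra= mechanism. A minimal, self-contained sketch of the before and after (illustrative values, not lines from the diff):

import logging

logging.basicConfig(level=logging.INFO)

# Before (structlog): context keys were passed as keyword arguments.
#     logger = structlog.get_logger("ctms.web")
#     logger.info("GET /ctms 200", **context)

# After (stdlib logging): the same keys ride along on the LogRecord via extra=.
web_logger = logging.getLogger("ctms.web")
context = {"status_code": 200, "duration_s": 0.012}  # illustrative payload
web_logger.info("GET /ctms 200", extra=context)
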
7 changes: 1 addition & 6 deletions asgi.py
@@ -1,22 +1,17 @@
 import uvicorn
 
 from ctms.config import Settings
-from ctms.log import configure_logging
 
 settings = Settings()
 
 
 if __name__ == "__main__":
-    logging_config = configure_logging(
-        settings.use_mozlog, settings.logging_level.name, settings.log_sqlalchemy
-    )
-
     server = uvicorn.Server(
         uvicorn.Config(
             "ctms.app:app",
             host=settings.host,
             port=settings.port,
-            log_config=logging_config,
+            log_config=None,
         )
     )
     server.run()
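
Passing log_config=None asks uvicorn not to apply its own logging configuration, so the dictConfig installed at import time in ctms.app stays in effect. A hedged sketch of the resulting launcher, with host and port hard-coded in place of the Settings fields:

import uvicorn

server = uvicorn.Server(
    uvicorn.Config(
        "ctms.app:app",
        host="0.0.0.0",   # stand-in for settings.host
        port=8000,        # stand-in for settings.port
        log_config=None,  # do not let uvicorn reconfigure logging
    )
)
# server.run()  # uncomment to actually serve
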
24 changes: 18 additions & 6 deletions ctms/app.py
@@ -1,7 +1,7 @@
+import logging
 import sys
 import time
 
-import structlog
 import uvicorn
 from dockerflow.fastapi import router as dockerflow_router
 from dockerflow.fastapi.middleware import RequestIdMiddleware
@@ -11,6 +11,7 @@
 from .config import get_version
 from .database import SessionLocal
 from .exception_capture import init_sentry
+from .log import CONFIG as LOG_CONFIG
 from .log import context_from_request, get_log_line
 from .metrics import (
     METRICS_REGISTRY,
@@ -22,6 +23,10 @@
 )
 from .routers import contacts, platform
 
+logging.config.dictConfig(LOG_CONFIG)
+
+web_logger = logging.getLogger("ctms.web")
+
 app = FastAPI(
     title="ConTact Management System (CTMS)",
     description="CTMS API (work in progress)",
@@ -65,14 +70,21 @@ async def log_request_middleware(request: Request, call_next):
     log_line = get_log_line(request, status_code, context.get("client_id"))
     duration = time.monotonic() - start_time
     duration_s = round(duration, 3)
-    context.update({"status_code": status_code, "duration_s": duration_s})
 
-    emit_response_metrics(context, get_metrics())
-    logger = structlog.get_logger("ctms.web")
+    emit_response_metrics(
+        path_template=context.get("path_template"),
+        method=context["method"],
+        duration_s=duration_s,
+        status_code=status_code,
+        client_id=context.get("client_id"),
+        metrics=get_metrics(),
+    )
+
+    context.update({"status_code": status_code, "duration_s": duration_s})
     if response is None:
-        logger.error(log_line, **context)
+        web_logger.error(log_line, extra=context)
     else:
-        logger.info(log_line, **context)
+        web_logger.info(log_line, extra=context)
     return response
 
 
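The extra=context call leans on a standard-library guarantee: every key passed via extra becomes an attribute on the emitted LogRecord, which is what downstream formatters and the updated test at the bottom of this commit read back. A small self-contained check of that behavior (logger and handler names are illustrative):

import logging

class CaptureHandler(logging.Handler):
    """Collect emitted records so their attributes can be inspected."""

    def __init__(self):
        super().__init__()
        self.records = []

    def emit(self, record):
        self.records.append(record)

demo_logger = logging.getLogger("ctms.web.demo")
handler = CaptureHandler()
demo_logger.addHandler(handler)
demo_logger.setLevel(logging.INFO)

demo_logger.info("GET /ctms 200", extra={"status_code": 200, "duration_s": 0.012})
record = handler.records[0]
assert record.status_code == 200      # extra keys become plain record attributes
assert record.duration_s == 0.012
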
136 changes: 42 additions & 94 deletions ctms/log.py
@@ -1,113 +1,61 @@
"""Logging configuration"""

import logging
import logging.config
import sys
from typing import Any, Dict, List, Optional
from typing import Any, Dict, Optional

import structlog
from dockerflow.logging import request_id_context
from fastapi import Request
from starlette.routing import Match
from structlog.types import Processor

from ctms.config import Settings

def configure_logging(
use_mozlog: bool = True, logging_level: str = "INFO", log_sqlalchemy: bool = False
) -> dict:
"""Configure Python logging.
settings = Settings()

:param use_mozlog: If True, use MozLog format, appropriate for deployments.
If False, format logs for human consumption.
:param logging_level: The logging level, such as DEBUG or INFO.
:param log_sqlalchemy: Include SQLAlchemy engine logs, such as SQL statements
"""

if use_mozlog:
structlog_fmt_prep: Processor = structlog.stdlib.render_to_log_kwargs
structlog_dev_processors: List[Processor] = []
else:
structlog_fmt_prep = structlog.stdlib.ProcessorFormatter.wrap_for_formatter
structlog_dev_processors = [
structlog.stdlib.add_logger_name,
structlog.stdlib.add_log_level,
structlog.processors.TimeStamper(fmt="iso"),
]
logging_config = {
"version": 1,
"disable_existing_loggers": False,
"filters": {
"request_id": {
"()": "dockerflow.logging.RequestIdLogFilter",
},
CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"filters": {
"request_id": {
"()": "dockerflow.logging.RequestIdLogFilter",
},
"formatters": {
"dev_console": {
"()": structlog.stdlib.ProcessorFormatter,
"processor": structlog.dev.ConsoleRenderer(colors=True),
"foreign_pre_chain": structlog_dev_processors,
},
"mozlog_json": {
"()": "dockerflow.logging.JsonLogFormatter",
"logger_name": "ctms",
},
},
"formatters": {
"mozlog_json": {
"()": "dockerflow.logging.JsonLogFormatter",
"logger_name": "ctms",
},
"handlers": {
"humans": {
"class": "logging.StreamHandler",
"stream": sys.stdout,
"formatter": "dev_console",
"level": logging.DEBUG,
"filters": ["request_id"],
},
"null": {
"class": "logging.NullHandler",
},
"mozlog": {
"class": "logging.StreamHandler",
"stream": sys.stdout,
"formatter": "mozlog_json",
"level": logging.DEBUG,
"filters": ["request_id"],
},
"text": {
"format": "%(asctime)s %(levelname)-8s [%(rid)s] %(name)-15s %(message)s",
"datefmt": "%Y-%m-%d %H:%M:%S",
},
"root": {
"handlers": ["mozlog" if use_mozlog else "humans"],
"level": logging_level,
},
"handlers": {
"console": {
"level": settings.logging_level.name,
"class": "logging.StreamHandler",
"filters": ["request_id"],
"formatter": "mozlog_json" if settings.use_mozlog else "text",
"stream": sys.stdout,
},
"loggers": {
"alembic": {"level": logging_level},
"ctms": {"level": logging_level},
"uvicorn": {"level": logging_level},
"uvicorn.access": {"handlers": ["null"], "propagate": False},
"sqlalchemy.engine": {
"handlers": ["mozlog" if use_mozlog else "humans"],
"level": logging_level if log_sqlalchemy else logging.WARNING,
"propagate": False,
},
"null": {
"class": "logging.NullHandler",
},
}
logging.config.dictConfig(logging_config)

structlog_processors: List[Processor] = [structlog.stdlib.filter_by_level]
structlog_processors.extend(structlog_dev_processors)
structlog_processors.extend(
[
structlog.stdlib.PositionalArgumentsFormatter(),
structlog.processors.StackInfoRenderer(),
structlog.processors.format_exc_info,
structlog.processors.UnicodeDecoder(),
structlog_fmt_prep,
]
)
structlog.configure(
processors=structlog_processors,
context_class=dict,
logger_factory=structlog.stdlib.LoggerFactory(),
wrapper_class=structlog.stdlib.BoundLogger,
cache_logger_on_first_use=True,
)
return logging_config
},
"loggers": {
"": {"handlers": ["console"]},
"request.summary": {"level": logging.INFO},
"ctms": {"level": logging.DEBUG},
"uvicorn": {"level": logging.INFO},
"uvicorn.access": {"handlers": ["null"], "propagate": False},
"sqlalchemy.engine": {
"level": settings.logging_level.name
if settings.log_sqlalchemy
else logging.WARNING,
"propagate": False,
},
},
}


def context_from_request(request: Request) -> Dict:
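Because CONFIG is now a plain dict built once from Settings, applying it is a single dictConfig call wherever the process starts; this commit does that at import time in ctms/app.py. A trimmed, self-contained sketch of the pattern, using a stand-in config rather than the real one:

import logging
import logging.config
import sys

# Stand-in config with the same overall shape as ctms.log.CONFIG, trimmed down.
DEMO_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,  # keep loggers created before this call
    "formatters": {
        "text": {
            "format": "%(asctime)s %(levelname)-8s %(name)-15s %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    },
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "formatter": "text",
            "stream": sys.stdout,
        },
    },
    "loggers": {
        "": {"handlers": ["console"]},
        "ctms": {"level": logging.DEBUG},
    },
}

logging.config.dictConfig(DEMO_CONFIG)
logging.getLogger("ctms").debug("logging configured via dictConfig")
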
14 changes: 7 additions & 7 deletions ctms/metrics.py
@@ -1,7 +1,7 @@
"""Prometheus metrics for instrumentation and monitoring."""

from itertools import product
from typing import Any, Type, cast
from typing import Any, Optional, Type, cast

from fastapi import FastAPI
from fastapi.security import HTTPBasic
@@ -135,21 +135,22 @@ def init_metrics_labels(
 
 
 def emit_response_metrics(
-    context: dict[str, Any], metrics: dict[str, Counter | Histogram]
+    path_template: Optional[str],
+    method: str,
+    duration_s: float,
+    status_code: int,
+    client_id: Optional[str],
+    metrics: dict[str, Counter | Histogram],
 ) -> None:
     """Emit metrics for a response."""
     if not metrics:
         return
 
-    path_template = context.get("path_template")
     if not path_template:
         # If no path_template, then it is not a known route, probably a 404.
         # Don't emit a metric, which will add noise to data
         return
 
-    method = context["method"]
-    duration_s = context["duration_s"]
-    status_code = context["status_code"]
     status_code_family = str(status_code)[0] + "xx"
 
     counter = cast(Counter, metrics["requests"])
@@ -167,7 +168,6 @@ def emit_response_metrics(
         status_code_family=status_code_family,
     ).observe(duration_s)
 
-    client_id = context.get("client_id")
     if client_id:
         counter = cast(Counter, metrics["api_requests"])
         counter.labels(
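With the request context untangled, emit_response_metrics takes its inputs explicitly and callers no longer assemble a context dict. A usage sketch; the empty metrics mapping makes it a no-op, since the function returns early when the mapping is falsy:

from ctms.metrics import emit_response_metrics

emit_response_metrics(
    path_template="/ctms/{email_id}",  # illustrative route template
    method="GET",
    duration_s=0.012,
    status_code=200,
    client_id=None,  # anonymous request: the api_requests counter is skipped
    metrics={},      # normally the app's name -> Counter/Histogram mapping
)
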
5 changes: 4 additions & 1 deletion migrations/env.py
@@ -14,7 +14,10 @@
 
 # Interpret the config file for Python logging.
 # This line sets up loggers basically.
-fileConfig(config.config_file_name)
+# We don't want this logging setup during unit tests,
+# see `alembic_command.upgrade(cfg, "head")` in `conftest.py`.
+if not context.config.attributes.get("unit-tests", False):
+    fileConfig(config.config_file_name)
 
 # add your model's MetaData object here
 # for 'autogenerate' support
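The guard works because alembic's Config.attributes is an in-memory dict shared with env.py, so programmatic runs can opt out of fileConfig without touching alembic.ini. A sketch of the calling side, mirroring the conftest.py change below (the path and engine are placeholders):

from alembic import command as alembic_command
from alembic.config import Config as AlembicConfig

cfg = AlembicConfig("alembic.ini")       # placeholder path
cfg.attributes["unit-tests"] = True      # read back in migrations/env.py
# cfg.attributes["connection"] = test_engine   # placeholder SQLAlchemy engine
# alembic_command.upgrade(cfg, "head")         # runs migrations without fileConfig
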
21 changes: 2 additions & 19 deletions poetry.lock

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion pyproject.toml
@@ -27,7 +27,6 @@ dockerflow = {extras = ["fastapi"], version = "^2024.4.2"}
 sentry-sdk = "^2.12.0"
 lxml = "^5.2.2"
 prometheus-client = "0.20.0"
-structlog = "^24.4.0"
 colorama = "^0.4.6"
 psycopg2 = "^2.9.9"
 google-auth = "^2.32.0"
2 changes: 1 addition & 1 deletion tests/unit/conftest.py
@@ -113,7 +113,7 @@ def engine(pytestconfig):
     )
 
     cfg = alembic_config.Config(os.path.join(APP_FOLDER, "alembic.ini"))
-
+    cfg.attributes["unit-tests"] = True
     cfg.attributes["connection"] = test_engine
     alembic_command.upgrade(cfg, "head")
 
11 changes: 6 additions & 5 deletions tests/unit/routers/contacts/test_api_post.py
@@ -1,9 +1,9 @@
"""Unit tests for POST /ctms (create record)"""

import logging
from uuid import UUID

import pytest
from structlog.testing import capture_logs

from tests.unit.conftest import SAMPLE_CONTACT_PARAMS

@@ -120,12 +120,13 @@ def _change_primary_email(contact):
     _compare_written_contacts(saved_contacts[0], orig_sample, email_id)
 
 
-def test_create_with_non_json_is_error(client):
+def test_create_with_non_json_is_error(client, caplog):
     """When non-JSON is posted /ctms, a 422 is returned"""
     data = b"this is not JSON"
-    with capture_logs() as cap_logs:
+    with caplog.at_level(logging.INFO, logger="ctms.web"):
         resp = client.post("/ctms", content=data)
+
     assert resp.status_code == 422
     assert resp.json()["detail"][0]["msg"] == "JSON decode error"
-    assert len(cap_logs) == 1
-    assert "trace" not in cap_logs[0]
+    assert len(caplog.records) == 1
+    assert caplog.records[0].status_code == 422
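
One assertion from the structlog version has no direct counterpart above: capture_logs exposed event dicts, while caplog exposes LogRecord objects, so checking that a field was not logged becomes an attribute check. A possible extra assertion, sketched here rather than taken from the commit:

    # Rough equivalent of the old `assert "trace" not in cap_logs[0]`:
    # keys passed via extra= become record attributes, so absence is hasattr().
    assert not hasattr(caplog.records[0], "trace")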