feat: add debug logging #198

Merged
merged 4 commits on Oct 22, 2024
Changes from all commits
18 changes: 18 additions & 0 deletions README.md
@@ -564,6 +564,24 @@ options = StreamOptions(
client.stream(fql('Product.all().eventSource()'), options)
```

## Logging

Logging is handled using Python's standard `logging` package under the `fauna` namespace. Logs include the HTTP request (method, URL, headers, and body, with the `Authorization` header omitted) and the full HTTP response.

To enable logging:
```python
import logging
from fauna.client import Client
from fauna import fql

logging.basicConfig(
level=logging.DEBUG
)
client = Client()
client.query(fql('42'))
```
For configuration options or to set specific log levels, see Python's [Logging HOWTO](https://docs.python.org/3/howto/logging.html).
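If you don't want DEBUG output from every library, a minimal sketch (assuming only the `fauna` logger name shown above) is to configure that logger directly instead of calling `basicConfig`:
```python
import logging
from fauna.client import Client
from fauna import fql

# Attach a handler and DEBUG level to the driver's logger only;
# the root logger and other libraries are left untouched.
fauna_logger = logging.getLogger("fauna")
fauna_logger.setLevel(logging.DEBUG)
fauna_logger.addHandler(logging.StreamHandler())

client = Client()
client.query(fql('42'))
```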

## Setup

```bash
5 changes: 4 additions & 1 deletion fauna/client/client.py
@@ -1,3 +1,4 @@
import logging
from dataclasses import dataclass
from datetime import timedelta
from typing import Any, Dict, Iterator, Mapping, Optional, Union, List
@@ -13,6 +14,8 @@
from fauna.http.http_client import HTTPClient
from fauna.query import EventSource, Query, Page, fql

logger = logging.getLogger("fauna")

DefaultHttpConnectTimeout = timedelta(seconds=5)
DefaultHttpReadTimeout: Optional[timedelta] = None
DefaultHttpWriteTimeout = timedelta(seconds=5)
@@ -216,7 +219,7 @@ def __init__(
max_keepalive_connections=DefaultMaxIdleConnections,
keepalive_expiry=idle_timeout_s,
),
))
), logger)
fauna.global_http_client = c

self._session = fauna.global_http_client
54 changes: 48 additions & 6 deletions fauna/http/httpx_client.py
@@ -1,4 +1,5 @@
import json
import logging
from contextlib import contextmanager
from json import JSONDecodeError
from typing import Mapping, Any, Optional, Iterator
@@ -50,9 +51,12 @@ def close(self) -> None:

class HTTPXClient(HTTPClient):

def __init__(self, client: httpx.Client):
def __init__(self,
client: httpx.Client,
logger: logging.Logger = logging.getLogger("fauna")):
super(HTTPXClient, self).__init__()
self._c = client
self._logger = logger

def request(
self,
@@ -69,14 +73,29 @@ def request(
json=data,
headers=headers,
)

if self._logger.isEnabledFor(logging.DEBUG):
headers_to_log = request.headers.copy()
headers_to_log.pop("Authorization")
self._logger.debug(
f"query.request method={request.method} url={request.url} headers={headers_to_log} data={data}"
)

except httpx.InvalidURL as e:
raise ClientError("Invalid URL Format") from e

try:
return HTTPXResponse(self._c.send(
response = self._c.send(
request,
stream=False,
))
)

if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug(
f"query.response status_code={response.status_code} headers={response.headers} data={response.text}"
)

return HTTPXResponse(response)
except (httpx.HTTPError, httpx.InvalidURL) as e:
raise NetworkError("Exception re-raised from HTTP request") from e

@@ -87,14 +106,37 @@ def stream(
headers: Mapping[str, str],
data: Mapping[str, Any],
) -> Iterator[Any]:
with self._c.stream(
"POST", url=url, headers=headers, json=data) as response:
request = self._c.build_request(
method="POST",
url=url,
headers=headers,
json=data,
)

if self._logger.isEnabledFor(logging.DEBUG):
headers_to_log = request.headers.copy()
headers_to_log.pop("Authorization")
self._logger.debug(
f"stream.request method={request.method} url={request.url} headers={headers_to_log} data={data}"
)

response = self._c.send(
request=request,
stream=True,
)

try:
yield self._transform(response)
finally:
response.close()

def _transform(self, response):
try:
for line in response.iter_lines():
yield json.loads(line)
loaded = json.loads(line)
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug(f"stream.data data={loaded}")
yield loaded
except httpx.ReadTimeout as e:
raise NetworkError("Stream timeout") from e
except (httpx.HTTPError, httpx.InvalidURL) as e:
12 changes: 8 additions & 4 deletions tests/integration/test_stream.py
@@ -1,12 +1,11 @@
import threading
import time

import httpx
import pytest

from fauna import fql
from fauna.client import Client, StreamOptions
from fauna.errors import ClientError, NetworkError, RetryableFaunaException, QueryRuntimeError
from fauna.errors import ClientError, RetryableFaunaException, QueryRuntimeError, NetworkError
from fauna.http.httpx_client import HTTPXClient


@@ -107,11 +106,16 @@ def test_max_retries(scoped_secret):

count = [0]

def stream_func(*args, **kwargs):
old_send = httpx_client.send

def send_func(*args, **kwargs):
if not kwargs['stream']:
return old_send(*args, **kwargs)

count[0] += 1
raise NetworkError('foo')

httpx_client.stream = stream_func
httpx_client.send = send_func

count[0] = 0
with pytest.raises(RetryableFaunaException):