Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 30 additions & 19 deletions src/openai/_base_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,16 @@
ModelBuilderProtocol,
not_given,
)
from ._utils import SensitiveHeadersFilter, is_dict, is_list, asyncify, is_given, lru_cache, is_mapping
from ._utils import (
SensitiveHeadersFilter,
is_dict,
is_list,
asyncify,
is_given,
lru_cache,
is_mapping,
redact_sensitive_headers,
)
from ._compat import PYDANTIC_V1, model_copy, model_dump
from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type
from ._response import (
Expand Down Expand Up @@ -1036,15 +1045,16 @@ def request(
log.debug("Raising connection error")
raise APIConnectionError(request=request) from err

log.debug(
'HTTP Response: %s %s "%i %s" %s',
request.method,
request.url,
response.status_code,
response.reason_phrase,
response.headers,
)
log.debug("request_id: %s", response.headers.get("x-request-id"))
if log.isEnabledFor(logging.DEBUG):
log.debug(
'HTTP Response: %s %s "%i %s" %s',
request.method,
request.url,
response.status_code,
response.reason_phrase,
redact_sensitive_headers(response.headers),
)
log.debug("request_id: %s", response.headers.get("x-request-id"))

try:
response.raise_for_status()
Expand Down Expand Up @@ -1635,15 +1645,16 @@ async def request(
log.debug("Raising connection error")
raise APIConnectionError(request=request) from err

log.debug(
'HTTP Response: %s %s "%i %s" %s',
request.method,
request.url,
response.status_code,
response.reason_phrase,
response.headers,
)
log.debug("request_id: %s", response.headers.get("x-request-id"))
if log.isEnabledFor(logging.DEBUG):
log.debug(
'HTTP Response: %s %s "%i %s" %s',
request.method,
request.url,
response.status_code,
response.reason_phrase,
redact_sensitive_headers(response.headers),
)
log.debug("request_id: %s", response.headers.get("x-request-id"))

try:
response.raise_for_status()
Expand Down
6 changes: 5 additions & 1 deletion src/openai/_utils/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
from ._logs import SensitiveHeadersFilter as SensitiveHeadersFilter
from ._logs import (
SENSITIVE_HEADERS as SENSITIVE_HEADERS,
SensitiveHeadersFilter as SensitiveHeadersFilter,
redact_sensitive_headers as redact_sensitive_headers,
)
from ._sync import asyncify as asyncify
from ._proxy import LazyProxy as LazyProxy
from ._utils import (
Expand Down
15 changes: 11 additions & 4 deletions src/openai/_utils/_logs.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
from __future__ import annotations

import os
import logging
from typing import Any, Mapping
from typing_extensions import override

from ._utils import is_dict
Expand All @@ -11,6 +14,13 @@
SENSITIVE_HEADERS = {"api-key", "authorization"}


def redact_sensitive_headers(headers: Mapping[str, Any]) -> dict[str, Any]:
    """Return a shallow copy of *headers* with sensitive values masked.

    Keys are compared case-insensitively against ``SENSITIVE_HEADERS``;
    matching entries have their value replaced with the literal string
    ``"<redacted>"`` while all other entries pass through unchanged.
    """
    masked: dict[str, Any] = {}
    for name, value in headers.items():
        if str(name).lower() in SENSITIVE_HEADERS:
            masked[name] = "<redacted>"
        else:
            masked[name] = value
    return masked


def _basic_config() -> None:
# e.g. [2023-10-05 14:12:26 - openai._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK"
logging.basicConfig(
Expand All @@ -35,8 +45,5 @@ class SensitiveHeadersFilter(logging.Filter):
@override
def filter(self, record: logging.LogRecord) -> bool:
    """Mask sensitive header values in a log record's ``headers`` argument.

    When ``record.args`` is a mapping containing a dict under ``"headers"``,
    that dict is replaced with a redacted copy so secrets never reach the
    log output. Always returns True: the record itself is never dropped,
    only sanitized in place.
    """
    args = record.args
    if is_dict(args) and is_dict(args.get("headers")):
        # Copy before redacting so the caller's original dict is untouched.
        args["headers"] = redact_sensitive_headers(dict(args["headers"]))
    return True
46 changes: 45 additions & 1 deletion tests/test_utils/test_logging.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
import logging
from typing import Any, Dict, cast

import httpx
import pytest
from respx import MockRouter

from openai._utils import SensitiveHeadersFilter
from openai import OpenAI
from openai._utils import SensitiveHeadersFilter, redact_sensitive_headers


@pytest.fixture
Expand Down Expand Up @@ -98,3 +101,44 @@ def test_standard_debug_msg(logger_with_filter: logging.Logger, caplog: pytest.L
with caplog.at_level(logging.DEBUG):
logger_with_filter.debug("Sending HTTP Request: %s %s", "POST", "chat/completions")
assert caplog.messages[0] == "Sending HTTP Request: POST chat/completions"


def test_response_headers_redaction() -> None:
    """Verify sensitive response headers are redacted by the shared helper."""
    filtered = redact_sensitive_headers(
        {
            "content-type": "application/json",
            "authorization": "Bearer sk-secret-key",
            "api-key": "my-secret-api-key",
            "x-request-id": "req_abc123",
        }
    )
    # Non-sensitive headers must pass through untouched.
    for passthrough_key, expected in (("content-type", "application/json"), ("x-request-id", "req_abc123")):
        assert filtered[passthrough_key] == expected
    # Sensitive headers must be masked.
    for sensitive_key in ("authorization", "api-key"):
        assert filtered[sensitive_key] == "<redacted>"


@pytest.mark.respx(base_url="https://api.openai.com/v1")
def test_response_header_redaction_in_client(
    respx_mock: MockRouter,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Integration test: verify _base_client.py redacts sensitive response headers in actual log output."""
    # Mock a completion endpoint whose response carries a sensitive header.
    mocked_response = httpx.Response(
        200,
        json={"id": "chatcmpl-test", "object": "chat.completion", "choices": [], "created": 0, "model": "gpt-4"},
        headers={"authorization": "Bearer secret", "x-request-id": "req_123"},
    )
    respx_mock.post("/chat/completions").mock(return_value=mocked_response)

    client = OpenAI(api_key="test-key", base_url="https://api.openai.com/v1")

    # DEBUG level is required: the client only emits the response log at DEBUG.
    with caplog.at_level(logging.DEBUG, logger="openai"):
        client.chat.completions.create(messages=[], model="gpt-4")

    response_logs = [record for record in caplog.records if record.getMessage().startswith("HTTP Response:")]
    assert len(response_logs) >= 1, "Expected at least one 'HTTP Response:' log line"
    msg = response_logs[0].getMessage()
    assert "secret" not in msg, "Sensitive header value should be redacted in log output"
    assert "<redacted>" in msg, "Redacted placeholder should appear in log output"