diff --git a/src/openai/_base_client.py b/src/openai/_base_client.py index cf4571bf45..eedce363a6 100644 --- a/src/openai/_base_client.py +++ b/src/openai/_base_client.py @@ -61,7 +61,16 @@ ModelBuilderProtocol, not_given, ) -from ._utils import SensitiveHeadersFilter, is_dict, is_list, asyncify, is_given, lru_cache, is_mapping +from ._utils import ( + SensitiveHeadersFilter, + is_dict, + is_list, + asyncify, + is_given, + lru_cache, + is_mapping, + redact_sensitive_headers, +) from ._compat import PYDANTIC_V1, model_copy, model_dump from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type from ._response import ( @@ -1036,15 +1045,16 @@ def request( log.debug("Raising connection error") raise APIConnectionError(request=request) from err - log.debug( - 'HTTP Response: %s %s "%i %s" %s', - request.method, - request.url, - response.status_code, - response.reason_phrase, - response.headers, - ) - log.debug("request_id: %s", response.headers.get("x-request-id")) + if log.isEnabledFor(logging.DEBUG): + log.debug( + 'HTTP Response: %s %s "%i %s" %s', + request.method, + request.url, + response.status_code, + response.reason_phrase, + redact_sensitive_headers(response.headers), + ) + log.debug("request_id: %s", response.headers.get("x-request-id")) try: response.raise_for_status() @@ -1635,15 +1645,16 @@ async def request( log.debug("Raising connection error") raise APIConnectionError(request=request) from err - log.debug( - 'HTTP Response: %s %s "%i %s" %s', - request.method, - request.url, - response.status_code, - response.reason_phrase, - response.headers, - ) - log.debug("request_id: %s", response.headers.get("x-request-id")) + if log.isEnabledFor(logging.DEBUG): + log.debug( + 'HTTP Response: %s %s "%i %s" %s', + request.method, + request.url, + response.status_code, + response.reason_phrase, + redact_sensitive_headers(response.headers), + ) + log.debug("request_id: %s", response.headers.get("x-request-id")) try: 
response.raise_for_status() diff --git a/src/openai/_utils/__init__.py b/src/openai/_utils/__init__.py index 963c83b6d4..15601ec59d 100644 --- a/src/openai/_utils/__init__.py +++ b/src/openai/_utils/__init__.py @@ -1,4 +1,8 @@ -from ._logs import SensitiveHeadersFilter as SensitiveHeadersFilter +from ._logs import ( + SENSITIVE_HEADERS as SENSITIVE_HEADERS, + SensitiveHeadersFilter as SensitiveHeadersFilter, + redact_sensitive_headers as redact_sensitive_headers, +) from ._sync import asyncify as asyncify from ._proxy import LazyProxy as LazyProxy from ._utils import ( diff --git a/src/openai/_utils/_logs.py b/src/openai/_utils/_logs.py index 376946933c..450131a57f 100644 --- a/src/openai/_utils/_logs.py +++ b/src/openai/_utils/_logs.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import os import logging +from typing import Any, Mapping from typing_extensions import override from ._utils import is_dict @@ -11,6 +14,13 @@ SENSITIVE_HEADERS = {"api-key", "authorization"} +def redact_sensitive_headers(headers: Mapping[str, Any]) -> dict[str, Any]: + return { + k: (v if str(k).lower() not in SENSITIVE_HEADERS else "<redacted>") + for k, v in headers.items() + } + + def _basic_config() -> None: # e.g.
[2023-10-05 14:12:26 - openai._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK" logging.basicConfig( @@ -35,8 +45,5 @@ class SensitiveHeadersFilter(logging.Filter): @override def filter(self, record: logging.LogRecord) -> bool: if is_dict(record.args) and "headers" in record.args and is_dict(record.args["headers"]): - headers = record.args["headers"] = {**record.args["headers"]} - for header in headers: - if str(header).lower() in SENSITIVE_HEADERS: - headers[header] = "<redacted>" + record.args["headers"] = redact_sensitive_headers({**record.args["headers"]}) return True diff --git a/tests/test_utils/test_logging.py b/tests/test_utils/test_logging.py index cc018012e2..2057ff00c6 100644 --- a/tests/test_utils/test_logging.py +++ b/tests/test_utils/test_logging.py @@ -1,9 +1,12 @@ import logging from typing import Any, Dict, cast +import httpx import pytest +from respx import MockRouter -from openai._utils import SensitiveHeadersFilter +from openai import OpenAI +from openai._utils import SensitiveHeadersFilter, redact_sensitive_headers @pytest.fixture @@ -98,3 +101,44 @@ def test_standard_debug_msg(logger_with_filter: logging.Logger, caplog: pytest.L with caplog.at_level(logging.DEBUG): logger_with_filter.debug("Sending HTTP Request: %s %s", "POST", "chat/completions") assert caplog.messages[0] == "Sending HTTP Request: POST chat/completions" + + +def test_response_headers_redaction() -> None: + """Verify sensitive response headers are redacted by the shared helper.""" + raw_headers = { + "content-type": "application/json", + "authorization": "Bearer sk-secret-key", + "api-key": "my-secret-api-key", + "x-request-id": "req_abc123", + } + filtered = redact_sensitive_headers(raw_headers) + assert filtered["content-type"] == "application/json" + assert filtered["authorization"] == "<redacted>" + assert filtered["api-key"] == "<redacted>" + assert filtered["x-request-id"] == "req_abc123" + + +@pytest.mark.respx(base_url="https://api.openai.com/v1") +def 
test_response_header_redaction_in_client( + respx_mock: MockRouter, + caplog: pytest.LogCaptureFixture, +) -> None: + """Integration test: verify _base_client.py redacts sensitive response headers in actual log output.""" + respx_mock.post("/chat/completions").mock( + return_value=httpx.Response( + 200, + json={"id": "chatcmpl-test", "object": "chat.completion", "choices": [], "created": 0, "model": "gpt-4"}, + headers={"authorization": "Bearer secret", "x-request-id": "req_123"}, + ) + ) + + client = OpenAI(api_key="test-key", base_url="https://api.openai.com/v1") + + with caplog.at_level(logging.DEBUG, logger="openai"): + client.chat.completions.create(messages=[], model="gpt-4") + + response_logs = [r for r in caplog.records if r.getMessage().startswith("HTTP Response:")] + assert len(response_logs) >= 1, "Expected at least one 'HTTP Response:' log line" + msg = response_logs[0].getMessage() + assert "secret" not in msg, "Sensitive header value should be redacted in log output" + assert "<redacted>" in msg, "Redacted placeholder should appear in log output"