Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions langfuse/api/blob_storage_integrations/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@ def upsert_blob_storage_integration(
secret_access_key: typing.Optional[str] = OMIT,
prefix: typing.Optional[str] = OMIT,
export_start_date: typing.Optional[dt.datetime] = OMIT,
compressed: typing.Optional[bool] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> BlobStorageIntegrationResponse:
"""
Expand Down Expand Up @@ -139,6 +140,9 @@ def upsert_blob_storage_integration(
export_start_date : typing.Optional[dt.datetime]
Custom start date for exports (required when exportMode is FROM_CUSTOM_DATE)

compressed : typing.Optional[bool]
Enable gzip compression for exported files (.csv.gz, .json.gz, .jsonl.gz). Defaults to true.

request_options : typing.Optional[RequestOptions]
Request-specific configuration.

Expand Down Expand Up @@ -191,6 +195,7 @@ def upsert_blob_storage_integration(
secret_access_key=secret_access_key,
prefix=prefix,
export_start_date=export_start_date,
compressed=compressed,
request_options=request_options,
)
return _response.data
Expand Down Expand Up @@ -348,6 +353,7 @@ async def upsert_blob_storage_integration(
secret_access_key: typing.Optional[str] = OMIT,
prefix: typing.Optional[str] = OMIT,
export_start_date: typing.Optional[dt.datetime] = OMIT,
compressed: typing.Optional[bool] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> BlobStorageIntegrationResponse:
"""
Expand Down Expand Up @@ -393,6 +399,9 @@ async def upsert_blob_storage_integration(
export_start_date : typing.Optional[dt.datetime]
Custom start date for exports (required when exportMode is FROM_CUSTOM_DATE)

compressed : typing.Optional[bool]
Enable gzip compression for exported files (.csv.gz, .json.gz, .jsonl.gz). Defaults to true.

request_options : typing.Optional[RequestOptions]
Request-specific configuration.

Expand Down Expand Up @@ -453,6 +462,7 @@ async def main() -> None:
secret_access_key=secret_access_key,
prefix=prefix,
export_start_date=export_start_date,
compressed=compressed,
request_options=request_options,
)
return _response.data
Expand Down
10 changes: 10 additions & 0 deletions langfuse/api/blob_storage_integrations/raw_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,6 +151,7 @@ def upsert_blob_storage_integration(
secret_access_key: typing.Optional[str] = OMIT,
prefix: typing.Optional[str] = OMIT,
export_start_date: typing.Optional[dt.datetime] = OMIT,
compressed: typing.Optional[bool] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> HttpResponse[BlobStorageIntegrationResponse]:
"""
Expand Down Expand Up @@ -196,6 +197,9 @@ def upsert_blob_storage_integration(
export_start_date : typing.Optional[dt.datetime]
Custom start date for exports (required when exportMode is FROM_CUSTOM_DATE)

compressed : typing.Optional[bool]
Enable gzip compression for exported files (.csv.gz, .json.gz, .jsonl.gz). Defaults to true.

request_options : typing.Optional[RequestOptions]
Request-specific configuration.

Expand All @@ -221,6 +225,7 @@ def upsert_blob_storage_integration(
"fileType": file_type,
"exportMode": export_mode,
"exportStartDate": export_start_date,
"compressed": compressed,
},
request_options=request_options,
omit=OMIT,
Expand Down Expand Up @@ -623,6 +628,7 @@ async def upsert_blob_storage_integration(
secret_access_key: typing.Optional[str] = OMIT,
prefix: typing.Optional[str] = OMIT,
export_start_date: typing.Optional[dt.datetime] = OMIT,
compressed: typing.Optional[bool] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[BlobStorageIntegrationResponse]:
"""
Expand Down Expand Up @@ -668,6 +674,9 @@ async def upsert_blob_storage_integration(
export_start_date : typing.Optional[dt.datetime]
Custom start date for exports (required when exportMode is FROM_CUSTOM_DATE)

compressed : typing.Optional[bool]
Enable gzip compression for exported files (.csv.gz, .json.gz, .jsonl.gz). Defaults to true.

request_options : typing.Optional[RequestOptions]
Request-specific configuration.

Expand All @@ -693,6 +702,7 @@ async def upsert_blob_storage_integration(
"fileType": file_type,
"exportMode": export_mode,
"exportStartDate": export_start_date,
"compressed": compressed,
},
request_options=request_options,
omit=OMIT,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ class BlobStorageIntegrationResponse(UniversalBaseModel):
export_start_date: typing_extensions.Annotated[
typing.Optional[dt.datetime], FieldMetadata(alias="exportStartDate")
] = None
compressed: bool
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🔴 `compressed` is declared as a required field (`compressed: bool`) with no default value, meaning any older self-hosted Langfuse server that does not yet return this field in API responses will cause Pydantic to raise a `ValidationError`, crashing `get_blob_storage_integration` and `upsert_blob_storage_integration`. Fix by changing `compressed: bool` to `compressed: bool = True` to match the server-side default and maintain backward compatibility.

Extended reasoning...

The Bug

`blob_storage_integration_response.py` at line 43 declares `compressed: bool` — a required Pydantic field with no default. Pydantic v2 raises a `ValidationError` if a required field is absent from the parsed data. There is no mechanism in `UniversalBaseModel` or the `extra="allow"` setting that compensates for missing required fields — `extra="allow"` only permits unexpected extra keys, not absent required ones.

Code Path That Triggers It

Both `get_blob_storage_integration` and `upsert_blob_storage_integration` ultimately call model validation for `BlobStorageIntegrationResponse` in `raw_client.py`. If the JSON response from the server lacks the `compressed` key, this call raises `ValidationError`.

Why Existing Code Does Not Prevent It

The `extra="allow"` setting is insufficient. `extra="allow"` means Pydantic tolerates extra keys beyond what the model declares — it does not provide defaults for absent declared fields. `UniversalBaseModel` does not override `model_validate` or add validators to fill in missing required fields. The only way to make a Pydantic field tolerant of absence is a Python-level default (`= True`, `= None`, etc.).

Impact

The Langfuse Python SDK is used against both langfuse.com (cloud) and self-hosted instances. Self-hosted users on older server versions — those deployed before the `compressed` feature was added — will receive API responses without the `compressed` key. Every call to list or upsert blob storage integrations will crash with an unhandled `ValidationError`. This is a hard failure for anyone running an older self-hosted Langfuse server with a newer SDK.

Addressing the Refutation

The refutation argues this is auto-generated from an API spec where the server contract guarantees `compressed` will always be present. This holds for the current server version, but Langfuse is self-hostable and users frequently run older instances. A new SDK talking to an older server is a completely normal scenario. The SDK's own established pattern — used for optional fields such as `prefix`, `export_start_date`, and `next_sync_at` — is to give defaults to fields added over time. The same principle applies here. Furthermore, the pre-existing fields were always part of the API; `compressed` is genuinely new and will be absent from older server responses.

Step-by-Step Proof

  1. User runs self-hosted Langfuse server at version X (predating the `compressed` field addition).
  2. User upgrades their Python SDK to the version containing this PR.
  3. User calls `get_blob_storage_integration` (or `upsert_blob_storage_integration`).
  4. Server returns JSON without `compressed`.
  5. SDK parses the response, constructing `BlobStorageIntegrationResponse` objects.
  6. Pydantic raises `ValidationError`: field `compressed` is required but missing.
  7. Exception propagates; the SDK call fails entirely.

Fix: Change `compressed: bool` to `compressed: bool = True` in `BlobStorageIntegrationResponse`. This matches the server-side default and is consistent with how other recently-added fields in the same model are handled.

next_sync_at: typing_extensions.Annotated[
typing.Optional[dt.datetime], FieldMetadata(alias="nextSyncAt")
] = None
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,11 @@ class CreateBlobStorageIntegrationRequest(UniversalBaseModel):
Custom start date for exports (required when exportMode is FROM_CUSTOM_DATE)
"""

compressed: typing.Optional[bool] = pydantic.Field(default=None)
"""
Enable gzip compression for exported files (.csv.gz, .json.gz, .jsonl.gz). Defaults to true.
"""

model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
extra="allow", frozen=True
)
Loading