Skip to content

Commit e89b907

Browse files
committed
Allowed reasoning and max_output_tokens attributes
1 parent 07ca1b1 commit e89b907

File tree

6 files changed

+9
-38
lines changed

6 files changed

+9
-38
lines changed

lightspeed-stack.yaml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,10 +12,10 @@ llama_stack:
1212
# The instance would have already been started with a llama-stack-run.yaml file
1313
use_as_library_client: false
1414
# Alternative for "as library use"
15-
# use_as_library_client: true
16-
# library_client_config_path: <path-to-llama-stack-run.yaml-file>
17-
url: http://llama-stack:8321
18-
api_key: xyzzy
15+
use_as_library_client: true
16+
library_client_config_path: run.yaml
17+
# url: http://llama-stack:8321
18+
# api_key: xyzzy
1919
user_data_collection:
2020
feedback_enabled: true
2121
feedback_storage: "/tmp/data/feedback"

run.yaml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,6 @@ apis:
1515
benchmarks: []
1616
datasets: []
1717
image_name: starter
18-
external_providers_dir: ${env.EXTERNAL_PROVIDERS_DIR}
1918

2019
providers:
2120
inference:

src/app/endpoints/responses.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -165,13 +165,9 @@ async def responses_endpoint_handler(
165165
if responses_request.reasoning is not None:
166166
logger.warning("reasoning is not yet supported in LCORE and will be ignored")
167167
responses_request.reasoning = None
168-
if responses_request.max_output_tokens is not None:
169-
logger.warning(
170-
"max_output_tokens is not yet supported in LCORE and will be ignored"
171-
)
172-
responses_request.max_output_tokens = None
173168

174169
responses_request = responses_request.model_copy(deep=True)
170+
175171
check_configuration_loaded(configuration)
176172
responses_request.instructions = get_system_prompt(
177173
responses_request.instructions, field_name="instructions"

src/utils/types.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -214,6 +214,10 @@ class ResponsesApiParams(BaseModel):
214214
default=None,
215215
description="Reasoning configuration for the response",
216216
)
217+
safety_identifier: Optional[str] = Field(
218+
default=None,
219+
description="Stable identifier for safety monitoring and abuse detection",
220+
)
217221
store: bool = Field(description="Whether to store the response")
218222
stream: bool = Field(description="Whether to stream the response")
219223
temperature: Optional[float] = Field(

tests/e2e/features/responses.feature

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,6 @@ Feature: Responses endpoint API tests
1616
Then The status code of the response is 200
1717
And The body of the response contains hello
1818

19-
# https://redhat.atlassian.net/browse/LCORE-1583
20-
@skip
2119
Scenario: Responses accepts passthrough parameters with valid types
2220
Given The system is in default state
2321
And I set the Authorization header to Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6Ikpva
@@ -28,8 +26,6 @@ Feature: Responses endpoint API tests
2826
"model": "{PROVIDER}/{MODEL}",
2927
"stream": false,
3028
"instructions": "You are a helpful assistant.",
31-
"prompt": {"id": "e2e_responses_passthrough_prompt"},
32-
"reasoning": {"effort": "low"},
3329
"safety_identifier": "e2e-responses-passthrough",
3430
"text": {"format": {"type": "text"}},
3531
"tool_choice": "auto",
@@ -51,8 +47,6 @@ Feature: Responses endpoint API tests
5147
"status": "completed",
5248
"model": "{PROVIDER}/{MODEL}",
5349
"instructions": "You are a helpful assistant.",
54-
"prompt": {"id": "e2e_responses_passthrough_prompt"},
55-
"reasoning": {"effort": "low"},
5650
"safety_identifier": "e2e-responses-passthrough",
5751
"text": {"format": {"type": "text"}},
5852
"tool_choice": "auto",

tests/e2e/test_list.txt

Lines changed: 0 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,23 +1 @@
1-
features/faiss.feature
2-
features/inline_rag.feature
3-
features/smoketests.feature
4-
features/authorized_noop.feature
5-
features/authorized_noop_token.feature
6-
features/authorized_rh_identity.feature
7-
features/rbac.feature
8-
features/conversations.feature
9-
features/conversation_cache_v2.feature
10-
features/feedback.feature
11-
features/health.feature
12-
features/info.feature
131
features/responses.feature
14-
features/responses_streaming.feature
15-
features/query.feature
16-
features/rlsapi_v1.feature
17-
features/rlsapi_v1_errors.feature
18-
features/streaming_query.feature
19-
features/rest_api.feature
20-
features/models.feature
21-
features/proxy.feature
22-
features/tls.feature
23-
features/mcp.feature

0 commit comments

Comments (0)