From a7a2cff0319db54a64c06171db4f3687c7f43009 Mon Sep 17 00:00:00 2001
From: "fern-api[bot]" <115122769+fern-api[bot]@users.noreply.github.com>
Date: Fri, 27 Feb 2026 16:01:50 +0000
Subject: [PATCH] SDK regeneration
---
.fern/metadata.json | 6 +-
poetry.lock | 20 +-
pyproject.toml | 2 +-
reference.md | 5105 -----------------
src/hume/base_client.py | 47 +-
src/hume/core/__init__.py | 18 +-
src/hume/core/client_wrapper.py | 19 +-
src/hume/core/custom_pagination.py | 152 -
src/hume/core/http_client.py | 195 +-
src/hume/core/logging.py | 107 +
src/hume/core/pydantic_utilities.py | 33 +-
src/hume/empathic_voice/__init__.py | 3 +
src/hume/empathic_voice/chat_groups/client.py | 16 +-
src/hume/empathic_voice/chats/client.py | 8 +-
src/hume/empathic_voice/configs/client.py | 36 +-
src/hume/empathic_voice/prompts/client.py | 65 +-
src/hume/empathic_voice/prompts/raw_client.py | 26 +
src/hume/empathic_voice/tools/client.py | 28 +-
src/hume/empathic_voice/types/__init__.py | 3 +
.../types/language_model_type.py | 5 +
.../types/model_provider_enum.py | 1 +
.../types/posted_config_prompt_spec.py | 2 +
.../empathic_voice/types/posted_ellm_model.py | 4 +-
.../types/prompt_expansion_spec.py | 28 +
.../types/return_chat_event_type.py | 14 +-
.../empathic_voice/types/return_ellm_model.py | 4 +-
.../empathic_voice/types/return_prompt.py | 2 +
src/hume/tts/voices/client.py | 4 +-
tests/conftest.py | 37 +-
tests/wire/conftest.py | 34 +-
tests/wire/test_empathicVoice_chatGroups.py | 21 +-
tests/wire/test_empathicVoice_chats.py | 10 +-
tests/wire/test_empathicVoice_configs.py | 36 +-
tests/wire/test_empathicVoice_prompts.py | 32 +-
tests/wire/test_empathicVoice_tools.py | 30 +-
tests/wire/test_tts.py | 2 +-
tests/wire/test_tts_voices.py | 2 +-
wiremock/wiremock-mappings.json | 2 +-
38 files changed, 613 insertions(+), 5546 deletions(-)
delete mode 100644 reference.md
delete mode 100644 src/hume/core/custom_pagination.py
create mode 100644 src/hume/core/logging.py
create mode 100644 src/hume/empathic_voice/types/prompt_expansion_spec.py
diff --git a/.fern/metadata.json b/.fern/metadata.json
index a30eb0cb..a78d8e44 100644
--- a/.fern/metadata.json
+++ b/.fern/metadata.json
@@ -1,7 +1,7 @@
{
- "cliVersion": "3.70.0",
+ "cliVersion": "3.90.2",
"generatorName": "fernapi/fern-python-sdk",
- "generatorVersion": "4.55.3",
+ "generatorVersion": "4.60.1",
"generatorConfig": {
"should_generate_websocket_clients": true,
"pyproject_python_version": ">=3.9,<4",
@@ -64,5 +64,5 @@
}
]
},
- "sdkVersion": "0.13.8"
+ "sdkVersion": "0.13.9"
}
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index f40bffbe..8cadfe66 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -241,13 +241,13 @@ css = ["tinycss2 (>=1.1.0,<1.5)"]
[[package]]
name = "certifi"
-version = "2026.1.4"
+version = "2026.2.25"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.7"
files = [
- {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"},
- {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"},
+ {file = "certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa"},
+ {file = "certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7"},
]
[[package]]
@@ -1298,13 +1298,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>
[[package]]
name = "jupyterlab"
-version = "4.5.3"
+version = "4.5.5"
description = "JupyterLab computational environment"
optional = true
python-versions = ">=3.9"
files = [
- {file = "jupyterlab-4.5.3-py3-none-any.whl", hash = "sha256:63c9f3a48de72ba00df766ad6eed416394f5bb883829f11eeff0872302520ba7"},
- {file = "jupyterlab-4.5.3.tar.gz", hash = "sha256:4a159f71067cb38e4a82e86a42de8e7e926f384d7f2291964f282282096d27e8"},
+ {file = "jupyterlab-4.5.5-py3-none-any.whl", hash = "sha256:a35694a40a8e7f2e82f387472af24e61b22adcce87b5a8ab97a5d9c486202a6d"},
+ {file = "jupyterlab-4.5.5.tar.gz", hash = "sha256:eac620698c59eb810e1729909be418d9373d18137cac66637141abba613b3fda"},
]
[package.dependencies]
@@ -1746,18 +1746,18 @@ files = [
[[package]]
name = "notebook"
-version = "7.5.3"
+version = "7.5.4"
description = "Jupyter Notebook - A web-based notebook environment for interactive computing"
optional = true
python-versions = ">=3.9"
files = [
- {file = "notebook-7.5.3-py3-none-any.whl", hash = "sha256:c997bfa1a2a9eb58c9bbb7e77d50428befb1033dd6f02c482922e96851d67354"},
- {file = "notebook-7.5.3.tar.gz", hash = "sha256:393ceb269cf9fdb02a3be607a57d7bd5c2c14604f1818a17dbeb38e04f98cbfa"},
+ {file = "notebook-7.5.4-py3-none-any.whl", hash = "sha256:860e31782b3d3a25ca0819ff039f5cf77845d1bf30c78ef9528b88b25e0a9850"},
+ {file = "notebook-7.5.4.tar.gz", hash = "sha256:b928b2ba22cb63aa83df2e0e76fe3697950a0c1c4a41b84ebccf1972b1bb5771"},
]
[package.dependencies]
jupyter-server = ">=2.4.0,<3"
-jupyterlab = ">=4.5.3,<4.6"
+jupyterlab = ">=4.5.5,<4.6"
jupyterlab-server = ">=2.28.0,<3"
notebook-shim = ">=0.2,<0.3"
tornado = ">=6.2.0"
diff --git a/pyproject.toml b/pyproject.toml
index 9cb33ec1..69deb566 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ dynamic = ["version"]
[tool.poetry]
name = "hume"
-version = "0.13.8"
+version = "0.13.9"
description = "A Python SDK for Hume AI"
readme = "README.md"
authors = []
diff --git a/reference.md b/reference.md
deleted file mode 100644
index 9b22384f..00000000
--- a/reference.md
+++ /dev/null
@@ -1,5105 +0,0 @@
-# Reference
-## Tts
-client.tts.synthesize_json(...) -> AsyncHttpResponse[ReturnTts]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Synthesizes one or more input texts into speech using the specified voice. If no voice is provided, a novel voice will be generated dynamically. Optionally, additional context can be included to influence the speech's style and prosody.
-
-The response includes the base64-encoded audio and metadata in JSON format.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-from hume.tts import FormatMp3, PostedContextWithUtterances, PostedUtterance
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.tts.synthesize_json(
- context=PostedContextWithUtterances(
- utterances=[
- PostedUtterance(
- text="How can people see beauty so differently?",
- description="A curious student with a clear and respectful tone, seeking clarification on Hume's ideas with a straightforward question.",
- )
- ],
- ),
- format=FormatMp3(),
- num_generations=1,
- utterances=[
- PostedUtterance(
- text="Beauty is no quality in things themselves: It exists merely in the mind which contemplates them.",
- description="Middle-aged masculine voice with a clear, rhythmic Scots lilt, rounded vowels, and a warm, steady tone with an articulate, academic quality.",
- )
- ],
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**utterances:** `typing.Sequence[PostedUtterance]`
-
-A list of **Utterances** to be converted to speech output.
-
-An **Utterance** is a unit of input for [Octave](/docs/text-to-speech-tts/overview), and includes input `text`, an optional `description` to serve as the prompt for how the speech should be delivered, an optional `voice` specification, and additional controls to guide delivery for `speed` and `trailing_silence`.
-
-
-
-
-
--
-
-**context:** `typing.Optional[PostedContext]` — Utterances to use as context for generating consistent speech style and prosody across multiple requests. These will not be converted to speech output.
-
-
-
-
-
--
-
-**format:** `typing.Optional[Format]` — Specifies the output audio file format.
-
-
-
-
-
--
-
-**include_timestamp_types:** `typing.Optional[typing.Sequence[TimestampType]]` — The set of timestamp types to include in the response. Only supported for Octave 2 requests.
-
-
-
-
-
--
-
-**num_generations:** `typing.Optional[int]`
-
-Number of audio generations to produce from the input utterances.
-
-Using `num_generations` enables faster processing than issuing multiple sequential requests. Additionally, specifying `num_generations` allows prosody continuation across all generations without repeating context, ensuring each generation sounds slightly different while maintaining contextual consistency.
-
-
-
-
-
--
-
-**split_utterances:** `typing.Optional[bool]`
-
-Controls how audio output is segmented in the response.
-
-- When **enabled** (`true`), input utterances are automatically split into natural-sounding speech segments.
-
-- When **disabled** (`false`), the response maintains a strict one-to-one mapping between input utterances and output snippets.
-
-This setting affects how the `snippets` array is structured in the response, which may be important for applications that need to track the relationship between input text and generated audio segments. When setting to `false`, avoid including utterances with long `text`, as this can result in distorted output.
-
-
-
-
-
--
-
-**strip_headers:** `typing.Optional[bool]` — If enabled, the audio for all the chunks of a generation, once concatenated together, will constitute a single audio file. Otherwise, if disabled, each chunk's audio will be its own audio file, each with its own headers (if applicable).
-
-
-
-
-
--
-
-**version:** `typing.Optional[OctaveVersion]`
-
-Selects the Octave model version used to synthesize speech for this request. If you omit this field, Hume automatically routes the request to the most appropriate model. Setting a specific version ensures stable and repeatable behavior across requests.
-
-Use `2` to opt into the latest Octave capabilities. When you specify version `2`, you must also provide a `voice`. Requests that set `version: 2` without a voice will be rejected.
-
-For a comparison of Octave versions, see the [Octave versions](/docs/text-to-speech-tts/overview#octave-versions) section in the TTS overview.
-
-
-
-
-
--
-
-**instant_mode:** `typing.Optional[bool]`
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.tts.synthesize_file(...) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Synthesizes one or more input texts into speech using the specified voice. If no voice is provided, a novel voice will be generated dynamically. Optionally, additional context can be included to influence the speech's style and prosody.
-
-The response contains the generated audio file in the requested format.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-from hume.tts import FormatMp3, PostedContextWithGenerationId, PostedUtterance
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.tts.synthesize_file(
- context=PostedContextWithGenerationId(
- generation_id="09ad914d-8e7f-40f8-a279-e34f07f7dab2",
- ),
- format=FormatMp3(),
- num_generations=1,
- utterances=[
- PostedUtterance(
- text="Beauty is no quality in things themselves: It exists merely in the mind which contemplates them.",
- description="Middle-aged masculine voice with a clear, rhythmic Scots lilt, rounded vowels, and a warm, steady tone with an articulate, academic quality.",
- )
- ],
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**utterances:** `typing.Sequence[PostedUtterance]`
-
-A list of **Utterances** to be converted to speech output.
-
-An **Utterance** is a unit of input for [Octave](/docs/text-to-speech-tts/overview), and includes input `text`, an optional `description` to serve as the prompt for how the speech should be delivered, an optional `voice` specification, and additional controls to guide delivery for `speed` and `trailing_silence`.
-
-
-
-
-
--
-
-**context:** `typing.Optional[PostedContext]` — Utterances to use as context for generating consistent speech style and prosody across multiple requests. These will not be converted to speech output.
-
-
-
-
-
--
-
-**format:** `typing.Optional[Format]` — Specifies the output audio file format.
-
-
-
-
-
--
-
-**include_timestamp_types:** `typing.Optional[typing.Sequence[TimestampType]]` — The set of timestamp types to include in the response. Only supported for Octave 2 requests.
-
-
-
-
-
--
-
-**num_generations:** `typing.Optional[int]`
-
-Number of audio generations to produce from the input utterances.
-
-Using `num_generations` enables faster processing than issuing multiple sequential requests. Additionally, specifying `num_generations` allows prosody continuation across all generations without repeating context, ensuring each generation sounds slightly different while maintaining contextual consistency.
-
-
-
-
-
--
-
-**split_utterances:** `typing.Optional[bool]`
-
-Controls how audio output is segmented in the response.
-
-- When **enabled** (`true`), input utterances are automatically split into natural-sounding speech segments.
-
-- When **disabled** (`false`), the response maintains a strict one-to-one mapping between input utterances and output snippets.
-
-This setting affects how the `snippets` array is structured in the response, which may be important for applications that need to track the relationship between input text and generated audio segments. When setting to `false`, avoid including utterances with long `text`, as this can result in distorted output.
-
-
-
-
-
--
-
-**strip_headers:** `typing.Optional[bool]` — If enabled, the audio for all the chunks of a generation, once concatenated together, will constitute a single audio file. Otherwise, if disabled, each chunk's audio will be its own audio file, each with its own headers (if applicable).
-
-
-
-
-
--
-
-**version:** `typing.Optional[OctaveVersion]`
-
-Selects the Octave model version used to synthesize speech for this request. If you omit this field, Hume automatically routes the request to the most appropriate model. Setting a specific version ensures stable and repeatable behavior across requests.
-
-Use `2` to opt into the latest Octave capabilities. When you specify version `2`, you must also provide a `voice`. Requests that set `version: 2` without a voice will be rejected.
-
-For a comparison of Octave versions, see the [Octave versions](/docs/text-to-speech-tts/overview#octave-versions) section in the TTS overview.
-
-
-
-
-
--
-
-**instant_mode:** `typing.Optional[bool]`
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response.
-
-
-
-
-
-
-
-
-
-
-
-client.tts.synthesize_file_streaming(...) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[bytes]]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Streams synthesized speech using the specified voice. If no voice is provided, a novel voice will be generated dynamically. Optionally, additional context can be included to influence the speech's style and prosody.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-from hume.tts import PostedUtterance, PostedUtteranceVoiceWithName
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.tts.synthesize_file_streaming(
- utterances=[
- PostedUtterance(
- text="Beauty is no quality in things themselves: It exists merely in the mind which contemplates them.",
- voice=PostedUtteranceVoiceWithName(
- name="Male English Actor",
- provider="HUME_AI",
- ),
- )
- ],
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**utterances:** `typing.Sequence[PostedUtterance]`
-
-A list of **Utterances** to be converted to speech output.
-
-An **Utterance** is a unit of input for [Octave](/docs/text-to-speech-tts/overview), and includes input `text`, an optional `description` to serve as the prompt for how the speech should be delivered, an optional `voice` specification, and additional controls to guide delivery for `speed` and `trailing_silence`.
-
-
-
-
-
--
-
-**context:** `typing.Optional[PostedContext]` — Utterances to use as context for generating consistent speech style and prosody across multiple requests. These will not be converted to speech output.
-
-
-
-
-
--
-
-**format:** `typing.Optional[Format]` — Specifies the output audio file format.
-
-
-
-
-
--
-
-**include_timestamp_types:** `typing.Optional[typing.Sequence[TimestampType]]` — The set of timestamp types to include in the response. Only supported for Octave 2 requests.
-
-
-
-
-
--
-
-**num_generations:** `typing.Optional[int]`
-
-Number of audio generations to produce from the input utterances.
-
-Using `num_generations` enables faster processing than issuing multiple sequential requests. Additionally, specifying `num_generations` allows prosody continuation across all generations without repeating context, ensuring each generation sounds slightly different while maintaining contextual consistency.
-
-
-
-
-
--
-
-**split_utterances:** `typing.Optional[bool]`
-
-Controls how audio output is segmented in the response.
-
-- When **enabled** (`true`), input utterances are automatically split into natural-sounding speech segments.
-
-- When **disabled** (`false`), the response maintains a strict one-to-one mapping between input utterances and output snippets.
-
-This setting affects how the `snippets` array is structured in the response, which may be important for applications that need to track the relationship between input text and generated audio segments. When setting to `false`, avoid including utterances with long `text`, as this can result in distorted output.
-
-
-
-
-
--
-
-**strip_headers:** `typing.Optional[bool]` — If enabled, the audio for all the chunks of a generation, once concatenated together, will constitute a single audio file. Otherwise, if disabled, each chunk's audio will be its own audio file, each with its own headers (if applicable).
-
-
-
-
-
--
-
-**version:** `typing.Optional[OctaveVersion]`
-
-Selects the Octave model version used to synthesize speech for this request. If you omit this field, Hume automatically routes the request to the most appropriate model. Setting a specific version ensures stable and repeatable behavior across requests.
-
-Use `2` to opt into the latest Octave capabilities. When you specify version `2`, you must also provide a `voice`. Requests that set `version: 2` without a voice will be rejected.
-
-For a comparison of Octave versions, see the [Octave versions](/docs/text-to-speech-tts/overview#octave-versions) section in the TTS overview.
-
-
-
-
-
--
-
-**instant_mode:** `typing.Optional[bool]`
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration. You can pass in configuration such as `chunk_size`, and more to customize the request and response.
-
-
-
-
-
-
-
-
-
-
-
-client.tts.synthesize_json_streaming(...) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[TtsOutput]]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Streams synthesized speech using the specified voice. If no voice is provided, a novel voice will be generated dynamically. Optionally, additional context can be included to influence the speech's style and prosody.
-
-The response is a stream of JSON objects including audio encoded in base64.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-from hume.tts import PostedUtterance, PostedUtteranceVoiceWithName
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.tts.synthesize_json_streaming(
- utterances=[
- PostedUtterance(
- text="Beauty is no quality in things themselves: It exists merely in the mind which contemplates them.",
- voice=PostedUtteranceVoiceWithName(
- name="Male English Actor",
- provider="HUME_AI",
- ),
- )
- ],
-)
-for chunk in response.data:
- yield chunk
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**utterances:** `typing.Sequence[PostedUtterance]`
-
-A list of **Utterances** to be converted to speech output.
-
-An **Utterance** is a unit of input for [Octave](/docs/text-to-speech-tts/overview), and includes input `text`, an optional `description` to serve as the prompt for how the speech should be delivered, an optional `voice` specification, and additional controls to guide delivery for `speed` and `trailing_silence`.
-
-
-
-
-
--
-
-**context:** `typing.Optional[PostedContext]` — Utterances to use as context for generating consistent speech style and prosody across multiple requests. These will not be converted to speech output.
-
-
-
-
-
--
-
-**format:** `typing.Optional[Format]` — Specifies the output audio file format.
-
-
-
-
-
--
-
-**include_timestamp_types:** `typing.Optional[typing.Sequence[TimestampType]]` — The set of timestamp types to include in the response. Only supported for Octave 2 requests.
-
-
-
-
-
--
-
-**num_generations:** `typing.Optional[int]`
-
-Number of audio generations to produce from the input utterances.
-
-Using `num_generations` enables faster processing than issuing multiple sequential requests. Additionally, specifying `num_generations` allows prosody continuation across all generations without repeating context, ensuring each generation sounds slightly different while maintaining contextual consistency.
-
-
-
-
-
--
-
-**split_utterances:** `typing.Optional[bool]`
-
-Controls how audio output is segmented in the response.
-
-- When **enabled** (`true`), input utterances are automatically split into natural-sounding speech segments.
-
-- When **disabled** (`false`), the response maintains a strict one-to-one mapping between input utterances and output snippets.
-
-This setting affects how the `snippets` array is structured in the response, which may be important for applications that need to track the relationship between input text and generated audio segments. When setting to `false`, avoid including utterances with long `text`, as this can result in distorted output.
-
-
-
-
-
--
-
-**strip_headers:** `typing.Optional[bool]` — If enabled, the audio for all the chunks of a generation, once concatenated together, will constitute a single audio file. Otherwise, if disabled, each chunk's audio will be its own audio file, each with its own headers (if applicable).
-
-
-
-
-
--
-
-**version:** `typing.Optional[OctaveVersion]`
-
-Selects the Octave model version used to synthesize speech for this request. If you omit this field, Hume automatically routes the request to the most appropriate model. Setting a specific version ensures stable and repeatable behavior across requests.
-
-Use `2` to opt into the latest Octave capabilities. When you specify version `2`, you must also provide a `voice`. Requests that set `version: 2` without a voice will be rejected.
-
-For a comparison of Octave versions, see the [Octave versions](/docs/text-to-speech-tts/overview#octave-versions) section in the TTS overview.
-
-
-
-
-
--
-
-**instant_mode:** `typing.Optional[bool]`
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.tts.convert_voice_json(...) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[TtsOutput]]]
-
--
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.tts.convert_voice_json()
-for chunk in response.data:
- yield chunk
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**strip_headers:** `typing.Optional[bool]` — If enabled, the audio for all the chunks of a generation, once concatenated together, will constitute a single audio file. Otherwise, if disabled, each chunk's audio will be its own audio file, each with its own headers (if applicable).
-
-
-
-
-
--
-
-**audio:** `from __future__ import annotations
-
-typing.Optional[core.File]` — See core.File for more documentation
-
-
-
-
-
--
-
-**context:** `typing.Optional[PostedContext]` — Utterances to use as context for generating consistent speech style and prosody across multiple requests. These will not be converted to speech output.
-
-
-
-
-
--
-
-**voice:** `typing.Optional[PostedUtteranceVoice]`
-
-
-
-
-
--
-
-**format:** `typing.Optional[Format]` — Specifies the output audio file format.
-
-
-
-
-
--
-
-**include_timestamp_types:** `typing.Optional[typing.List[TimestampType]]` — The set of timestamp types to include in the response. When used in multipart/form-data, specify each value using bracket notation: `include_timestamp_types[0]=word&include_timestamp_types[1]=phoneme`. Only supported for Octave 2 requests.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-## Tts Voices
-client.tts.voices.list(...) -> AsyncPager[ReturnVoice, ReturnPagedVoices]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Lists voices you have saved in your account, or voices from the [Voice Library](https://app.hume.ai/tts/voice-library).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.tts.voices.list(
- provider="CUSTOM_VOICE",
-)
-for item in response:
- yield item
-# alternatively, you can paginate page-by-page
-for page in response.iter_pages():
- yield page
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**provider:** `VoiceProvider`
-
-Specify the voice provider to filter voices returned by the endpoint:
-
-- **`HUME_AI`**: Lists preset, shared voices from Hume's [Voice Library](https://app.hume.ai/tts/voice-library).
-- **`CUSTOM_VOICE`**: Lists custom voices created and saved to your account.
-
-
-
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**ascending_order:** `typing.Optional[bool]`
-
-
-
-
-
--
-
-**filter_tag:** `typing.Optional[typing.Union[str, typing.Sequence[str]]]`
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.tts.voices.create(...) -> AsyncHttpResponse[ReturnVoice]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Saves a new custom voice to your account using the specified TTS generation ID.
-
-Once saved, this voice can be reused in subsequent TTS requests, ensuring consistent speech style and prosody. For more details on voice creation, see the [Voices Guide](/docs/text-to-speech-tts/voices).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.tts.voices.create(
- generation_id="795c949a-1510-4a80-9646-7d0863b023ab",
- name="David Hume",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**generation_id:** `str` — A unique ID associated with this TTS generation that can be used as context for generating consistent speech style and prosody across multiple requests.
-
-
-
-
-
--
-
-**name:** `str` — The name of a **Voice**.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.tts.voices.delete(...) -> AsyncHttpResponse[None]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Deletes a previously generated custom voice.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.tts.voices.delete(
- name="David Hume",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**name:** `str` — Name of the voice to delete
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-## EmpathicVoice ControlPlane
-client.empathic_voice.control_plane.send(...) -> AsyncHttpResponse[None]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Send a message to a specific chat.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-from hume.empathic_voice import SessionSettings
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.control_plane.send(
- chat_id="chat_id",
- request=SessionSettings(),
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**chat_id:** `str`
-
-
-
-
-
--
-
-**request:** `ControlPlanePublishEvent`
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-## EmpathicVoice ChatGroups
-client.empathic_voice.chat_groups.list_chat_groups(...) -> AsyncPager[ReturnChatGroup, ReturnPagedChatGroups]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a paginated list of **Chat Groups**.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.empathic_voice.chat_groups.list_chat_groups(
- page_number=0,
- page_size=1,
- ascending_order=True,
- config_id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
-)
-for item in response:
- yield item
-# alternatively, you can paginate page-by-page
-for page in response.iter_pages():
- yield page
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**ascending_order:** `typing.Optional[bool]` — Specifies the sorting order of the results based on their creation date. Set to true for ascending order (chronological, with the oldest records first) and false for descending order (reverse-chronological, with the newest records first). Defaults to true.
-
-
-
-
-
--
-
-**config_id:** `typing.Optional[str]`
-
-The unique identifier for an EVI configuration.
-
-Filter Chat Groups to only include Chats that used this `config_id` in their most recent Chat.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.chat_groups.get_chat_group(...) -> AsyncHttpResponse[ReturnChatGroupPagedChats]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a **ChatGroup** by ID, including a paginated list of **Chats** associated with the **ChatGroup**.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.chat_groups.get_chat_group(
- id="697056f0-6c7e-487d-9bd8-9c19df79f05f",
- page_number=0,
- page_size=1,
- ascending_order=True,
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Chat Group. Formatted as a UUID.
-
-
-
-
-
--
-
-**status:** `typing.Optional[str]` — Chat status to apply to the chat. String from the ChatStatus enum.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**ascending_order:** `typing.Optional[bool]` — Specifies the sorting order of the results based on their creation date. Set to true for ascending order (chronological, with the oldest records first) and false for descending order (reverse-chronological, with the newest records first). Defaults to true.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.chat_groups.get_audio(...) -> AsyncHttpResponse[ReturnChatGroupPagedAudioReconstructions]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a paginated list of audio for each **Chat** within the specified **Chat Group**. For more details, see our guide on audio reconstruction [here](/docs/speech-to-speech-evi/faq#can-i-access-the-audio-of-previous-conversations-with-evi).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.chat_groups.get_audio(
- id="369846cf-6ad5-404d-905e-a8acb5cdfc78",
- page_number=0,
- page_size=10,
- ascending_order=True,
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Chat Group. Formatted as a UUID.
-
-
-
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**ascending_order:** `typing.Optional[bool]` — Specifies the sorting order of the results based on their creation date. Set to true for ascending order (chronological, with the oldest records first) and false for descending order (reverse-chronological, with the newest records first). Defaults to true.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.chat_groups.list_chat_group_events(...) -> AsyncPager[ReturnChatEvent, ReturnChatGroupPagedEvents]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a paginated list of **Chat** events associated with a **Chat Group**.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.empathic_voice.chat_groups.list_chat_group_events(
- id="697056f0-6c7e-487d-9bd8-9c19df79f05f",
- page_number=0,
- page_size=3,
- ascending_order=True,
-)
-for item in response:
-    print(item)
-# alternatively, you can paginate page-by-page
-for page in response.iter_pages():
-    print(page)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Chat Group. Formatted as a UUID.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**ascending_order:** `typing.Optional[bool]` — Specifies the sorting order of the results based on their creation date. Set to true for ascending order (chronological, with the oldest records first) and false for descending order (reverse-chronological, with the newest records first). Defaults to true.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-## EmpathicVoice Chats
-client.empathic_voice.chats.list_chats(...) -> AsyncPager[ReturnChat, ReturnPagedChats]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a paginated list of **Chats**.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.empathic_voice.chats.list_chats(
- page_number=0,
- page_size=1,
- ascending_order=True,
-)
-for item in response:
-    print(item)
-# alternatively, you can paginate page-by-page
-for page in response.iter_pages():
-    print(page)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**ascending_order:** `typing.Optional[bool]` — Specifies the sorting order of the results based on their creation date. Set to true for ascending order (chronological, with the oldest records first) and false for descending order (reverse-chronological, with the newest records first). Defaults to true.
-
-
-
-
-
--
-
-**config_id:** `typing.Optional[str]` — Filter to only include chats that used this config.
-
-
-
-
-
--
-
-**status:** `typing.Optional[str]` — Chat status to apply to the chat. String from the ChatStatus enum.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.chats.list_chat_events(...) -> AsyncPager[ReturnChatEvent, ReturnChatPagedEvents]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a paginated list of **Chat** events.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.empathic_voice.chats.list_chat_events(
- id="470a49f6-1dec-4afe-8b61-035d3b2d63b0",
- page_number=0,
- page_size=3,
- ascending_order=True,
-)
-for item in response:
-    print(item)
-# alternatively, you can paginate page-by-page
-for page in response.iter_pages():
-    print(page)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Chat. Formatted as a UUID.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**ascending_order:** `typing.Optional[bool]` — Specifies the sorting order of the results based on their creation date. Set to true for ascending order (chronological, with the oldest records first) and false for descending order (reverse-chronological, with the newest records first). Defaults to true.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.chats.get_audio(...) -> AsyncHttpResponse[ReturnChatAudioReconstruction]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches the audio of a previous **Chat**. For more details, see our guide on audio reconstruction [here](/docs/speech-to-speech-evi/faq#can-i-access-the-audio-of-previous-conversations-with-evi).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.chats.get_audio(
- id="470a49f6-1dec-4afe-8b61-035d3b2d63b0",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Chat. Formatted as a UUID.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-## EmpathicVoice Configs
-client.empathic_voice.configs.list_configs(...) -> AsyncPager[ReturnConfig, ReturnPagedConfigs]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a paginated list of **Configs**.
-
-For more details on configuration options and how to configure EVI, see our [configuration guide](/docs/speech-to-speech-evi/configuration).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.empathic_voice.configs.list_configs(
- page_number=0,
- page_size=1,
-)
-for item in response:
-    print(item)
-# alternatively, you can paginate page-by-page
-for page in response.iter_pages():
-    print(page)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**restrict_to_most_recent:** `typing.Optional[bool]` — By default, `restrict_to_most_recent` is set to true, returning only the latest version of each config. To include all versions of each config in the list, set `restrict_to_most_recent` to false.
-
-
-
-
-
--
-
-**name:** `typing.Optional[str]` — Filter to only include configs with this name.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.configs.create_config(...) -> AsyncHttpResponse[ReturnConfig]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Creates a **Config** which can be applied to EVI.
-
-For more details on configuration options and how to configure EVI, see our [configuration guide](/docs/speech-to-speech-evi/configuration).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-from hume.empathic_voice import (
- PostedConfigPromptSpec,
- PostedEventMessageSpec,
- PostedEventMessageSpecs,
- PostedLanguageModel,
- VoiceName,
-)
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.configs.create_config(
- name="Weather Assistant Config",
- prompt=PostedConfigPromptSpec(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
- version=0,
- ),
- evi_version="3",
- voice=VoiceName(
- provider="HUME_AI",
- name="Ava Song",
- ),
- language_model=PostedLanguageModel(
- model_provider="ANTHROPIC",
- model_resource="claude-3-7-sonnet-latest",
- temperature=1.0,
- ),
- event_messages=PostedEventMessageSpecs(
- on_new_chat=PostedEventMessageSpec(
- enabled=False,
- text="",
- ),
- on_inactivity_timeout=PostedEventMessageSpec(
- enabled=False,
- text="",
- ),
- on_max_duration_timeout=PostedEventMessageSpec(
- enabled=False,
- text="",
- ),
- ),
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**evi_version:** `str` — EVI version to use. Only versions `3` and `4-mini` are supported.
-
-
-
-
-
--
-
-**name:** `str` — Name applied to all versions of a particular Config.
-
-
-
-
-
--
-
-**builtin_tools:** `typing.Optional[typing.Sequence[typing.Optional[PostedBuiltinTool]]]` — Built-in tool specification for a Config.
-
-
-
-
-
--
-
-**ellm_model:** `typing.Optional[PostedEllmModel]`
-
-
-
-
-
--
-
-**event_messages:** `typing.Optional[PostedEventMessageSpecs]`
-
-
-
-
-
--
-
-**language_model:** `typing.Optional[PostedLanguageModel]`
-
-
-
-
-
--
-
-**nudges:** `typing.Optional[PostedNudgeSpec]`
-
-
-
-
-
--
-
-**prompt:** `typing.Optional[PostedConfigPromptSpec]`
-
-
-
-
-
--
-
-**timeouts:** `typing.Optional[PostedTimeoutSpecs]`
-
-
-
-
-
--
-
-**tools:** `typing.Optional[typing.Sequence[typing.Optional[PostedUserDefinedToolSpec]]]` — Tool specification for a Config.
-
-
-
-
-
--
-
-**version_description:** `typing.Optional[str]` — An optional description of the Config version.
-
-
-
-
-
--
-
-**voice:** `typing.Optional[VoiceRef]` — A voice specification associated with this Config.
-
-
-
-
-
--
-
-**webhooks:** `typing.Optional[typing.Sequence[typing.Optional[PostedWebhookSpec]]]` — Webhook config specifications for each subscriber.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.configs.list_config_versions(...) -> AsyncPager[ReturnConfig, ReturnPagedConfigs]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a list of a **Config's** versions.
-
-For more details on configuration options and how to configure EVI, see our [configuration guide](/docs/speech-to-speech-evi/configuration).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.empathic_voice.configs.list_config_versions(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
-)
-for item in response:
-    print(item)
-# alternatively, you can paginate page-by-page
-for page in response.iter_pages():
-    print(page)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Config. Formatted as a UUID.
-
-
-
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**restrict_to_most_recent:** `typing.Optional[bool]` — By default, `restrict_to_most_recent` is set to true, returning only the latest version of each config. To include all versions of each config in the list, set `restrict_to_most_recent` to false.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.configs.create_config_version(...) -> AsyncHttpResponse[ReturnConfig]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Updates a **Config** by creating a new version of the **Config**.
-
-For more details on configuration options and how to configure EVI, see our [configuration guide](/docs/speech-to-speech-evi/configuration).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-from hume.empathic_voice import (
- PostedConfigPromptSpec,
- PostedEllmModel,
- PostedEventMessageSpec,
- PostedEventMessageSpecs,
- PostedLanguageModel,
- VoiceName,
-)
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.configs.create_config_version(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
- version_description="This is an updated version of the Weather Assistant Config.",
- evi_version="3",
- prompt=PostedConfigPromptSpec(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
- version=0,
- ),
- voice=VoiceName(
- provider="HUME_AI",
- name="Ava Song",
- ),
- language_model=PostedLanguageModel(
- model_provider="ANTHROPIC",
- model_resource="claude-3-7-sonnet-latest",
- temperature=1.0,
- ),
- ellm_model=PostedEllmModel(
- allow_short_responses=True,
- ),
- event_messages=PostedEventMessageSpecs(
- on_new_chat=PostedEventMessageSpec(
- enabled=False,
- text="",
- ),
- on_inactivity_timeout=PostedEventMessageSpec(
- enabled=False,
- text="",
- ),
- on_max_duration_timeout=PostedEventMessageSpec(
- enabled=False,
- text="",
- ),
- ),
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Config. Formatted as a UUID.
-
-
-
-
-
--
-
-**evi_version:** `str` — EVI version to use. Only versions `3` and `4-mini` are supported.
-
-
-
-
-
--
-
-**builtin_tools:** `typing.Optional[typing.Sequence[typing.Optional[PostedBuiltinTool]]]` — Built-in tool specification for a Config.
-
-
-
-
-
--
-
-**ellm_model:** `typing.Optional[PostedEllmModel]`
-
-
-
-
-
--
-
-**event_messages:** `typing.Optional[PostedEventMessageSpecs]`
-
-
-
-
-
--
-
-**language_model:** `typing.Optional[PostedLanguageModel]`
-
-
-
-
-
--
-
-**nudges:** `typing.Optional[PostedNudgeSpec]`
-
-
-
-
-
--
-
-**prompt:** `typing.Optional[PostedConfigPromptSpec]`
-
-
-
-
-
--
-
-**timeouts:** `typing.Optional[PostedTimeoutSpecs]`
-
-
-
-
-
--
-
-**tools:** `typing.Optional[typing.Sequence[typing.Optional[PostedUserDefinedToolSpec]]]` — Tool specification for a Config.
-
-
-
-
-
--
-
-**version_description:** `typing.Optional[str]` — An optional description of the Config version.
-
-
-
-
-
--
-
-**voice:** `typing.Optional[VoiceRef]`
-
-
-
-
-
--
-
-**webhooks:** `typing.Optional[typing.Sequence[typing.Optional[PostedWebhookSpec]]]` — Webhook config specifications for each subscriber.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.configs.delete_config(...) -> AsyncHttpResponse[None]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Deletes a **Config** and its versions.
-
-For more details on configuration options and how to configure EVI, see our [configuration guide](/docs/speech-to-speech-evi/configuration).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.configs.delete_config(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Config. Formatted as a UUID.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.configs.update_config_name(...) -> AsyncHttpResponse[str]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Updates the name of a **Config**.
-
-For more details on configuration options and how to configure EVI, see our [configuration guide](/docs/speech-to-speech-evi/configuration).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.configs.update_config_name(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
- name="Updated Weather Assistant Config Name",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Config. Formatted as a UUID.
-
-
-
-
-
--
-
-**name:** `str` — Name applied to all versions of a particular Config.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.configs.get_config_version(...) -> AsyncHttpResponse[ReturnConfig]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a specified version of a **Config**.
-
-For more details on configuration options and how to configure EVI, see our [configuration guide](/docs/speech-to-speech-evi/configuration).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.configs.get_config_version(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
- version=1,
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Config. Formatted as a UUID.
-
-
-
-
-
--
-
-**version:** `int`
-
-Version number for a Config.
-
-Configs, Prompts, Custom Voices, and Tools are versioned. This versioning system supports iterative development, allowing you to progressively refine configurations and revert to previous versions if needed.
-
-Version numbers are integer values representing different iterations of the Config. Each update to the Config increments its version number.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.configs.delete_config_version(...) -> AsyncHttpResponse[None]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Deletes a specified version of a **Config**.
-
-For more details on configuration options and how to configure EVI, see our [configuration guide](/docs/speech-to-speech-evi/configuration).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.configs.delete_config_version(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
- version=1,
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Config. Formatted as a UUID.
-
-
-
-
-
--
-
-**version:** `int`
-
-Version number for a Config.
-
-Configs, Prompts, Custom Voices, and Tools are versioned. This versioning system supports iterative development, allowing you to progressively refine configurations and revert to previous versions if needed.
-
-Version numbers are integer values representing different iterations of the Config. Each update to the Config increments its version number.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.configs.update_config_description(...) -> AsyncHttpResponse[ReturnConfig]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Updates the description of a **Config**.
-
-For more details on configuration options and how to configure EVI, see our [configuration guide](/docs/speech-to-speech-evi/configuration).
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.configs.update_config_description(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
- version=1,
- version_description="This is an updated version_description.",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Config. Formatted as a UUID.
-
-
-
-
-
--
-
-**version:** `int`
-
-Version number for a Config.
-
-Configs, Prompts, Custom Voices, and Tools are versioned. This versioning system supports iterative development, allowing you to progressively refine configurations and revert to previous versions if needed.
-
-Version numbers are integer values representing different iterations of the Config. Each update to the Config increments its version number.
-
-
-
-
-
--
-
-**version_description:** `typing.Optional[str]` — An optional description of the Config version.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-## EmpathicVoice Prompts
-client.empathic_voice.prompts.list_prompts(...) -> AsyncPager[typing.Optional[ReturnPrompt], ReturnPagedPrompts]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a paginated list of **Prompts**.
-
-See our [prompting guide](/docs/speech-to-speech-evi/guides/prompting) for tips on crafting your system prompt.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.empathic_voice.prompts.list_prompts(
- page_number=0,
- page_size=2,
-)
-for item in response:
-    print(item)
-# alternatively, you can paginate page-by-page
-for page in response.iter_pages():
-    print(page)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**restrict_to_most_recent:** `typing.Optional[bool]` — By default, `restrict_to_most_recent` is set to true, returning only the latest version of each prompt. To include all versions of each prompt in the list, set `restrict_to_most_recent` to false.
-
-
-
-
-
--
-
-**name:** `typing.Optional[str]` — Filter to only include prompts with this name.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.prompts.create_prompt(...) -> AsyncHttpResponse[typing.Optional[ReturnPrompt]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Creates a **Prompt** that can be added to an [EVI configuration](/reference/speech-to-speech-evi/configs/create-config).
-
-See our [prompting guide](/docs/speech-to-speech-evi/guides/prompting) for tips on crafting your system prompt.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.prompts.create_prompt(
- name="Weather Assistant Prompt",
- text="You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**name:** `str` — Name applied to all versions of a particular Prompt.
-
-
-
-
-
--
-
-**text:** `str`
-
-Instructions used to shape EVI's behavior, responses, and style.
-
-You can use the Prompt to define a specific goal or role for EVI, specifying how it should act or what it should focus on during the conversation. For example, EVI can be instructed to act as a customer support representative, a fitness coach, or a travel advisor, each with its own set of behaviors and response styles. For help writing a system prompt, see our [Prompting Guide](/docs/speech-to-speech-evi/guides/prompting).
-
-
-
-
-
--
-
-**version_description:** `typing.Optional[str]` — An optional description of the Prompt version.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.prompts.list_prompt_versions(...) -> AsyncHttpResponse[ReturnPagedPrompts]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a list of a **Prompt's** versions.
-
-See our [prompting guide](/docs/speech-to-speech-evi/guides/prompting) for tips on crafting your system prompt.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.prompts.list_prompt_versions(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str`
-
-
-
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**restrict_to_most_recent:** `typing.Optional[bool]` — By default, `restrict_to_most_recent` is set to true, returning only the latest version of each prompt. To include all versions of each prompt in the list, set `restrict_to_most_recent` to false.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.prompts.create_prompt_version(...) -> AsyncHttpResponse[typing.Optional[ReturnPrompt]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Updates a **Prompt** by creating a new version of the **Prompt**.
-
-See our [prompting guide](/docs/speech-to-speech-evi/guides/prompting) for tips on crafting your system prompt.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.prompts.create_prompt_version(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
- text="You are an updated version of an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.",
- version_description="This is an updated version of the Weather Assistant Prompt.",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Prompt. Formatted as a UUID.
-
-
-
-
-
--
-
-**text:** `str`
-
-Instructions used to shape EVI's behavior, responses, and style.
-
-You can use the Prompt to define a specific goal or role for EVI, specifying how it should act or what it should focus on during the conversation. For example, EVI can be instructed to act as a customer support representative, a fitness coach, or a travel advisor, each with its own set of behaviors and response styles. For help writing a system prompt, see our [Prompting Guide](/docs/speech-to-speech-evi/guides/prompting).
-
-
-
-
-
--
-
-**version_description:** `typing.Optional[str]` — An optional description of the Prompt version.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.prompts.delete_prompt(...) -> AsyncHttpResponse[None]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Deletes a **Prompt** and its versions.
-
-See our [prompting guide](/docs/speech-to-speech-evi/guides/prompting) for tips on crafting your system prompt.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.prompts.delete_prompt(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Prompt. Formatted as a UUID.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.prompts.update_prompt_name(...) -> AsyncHttpResponse[str]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Updates the name of a **Prompt**.
-
-See our [prompting guide](/docs/speech-to-speech-evi/guides/phone-calling) for tips on crafting your system prompt.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.prompts.update_prompt_name(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
- name="Updated Weather Assistant Prompt Name",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Prompt. Formatted as a UUID.
-
-
-
-
-
--
-
-**name:** `str` — Name applied to all versions of a particular Prompt.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.prompts.get_prompt_version(...) -> AsyncHttpResponse[typing.Optional[ReturnPrompt]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a specified version of a **Prompt**.
-
-See our [prompting guide](/docs/speech-to-speech-evi/guides/phone-calling) for tips on crafting your system prompt.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.prompts.get_prompt_version(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
- version=0,
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Prompt. Formatted as a UUID.
-
-
-
-
-
--
-
-**version:** `int`
-
-Version number for a Prompt.
-
-Prompts, Configs, Custom Voices, and Tools are versioned. This versioning system supports iterative development, allowing you to progressively refine prompts and revert to previous versions if needed.
-
-Version numbers are integer values representing different iterations of the Prompt. Each update to the Prompt increments its version number.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.prompts.delete_prompt_version(...) -> AsyncHttpResponse[None]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Deletes a specified version of a **Prompt**.
-
-See our [prompting guide](/docs/speech-to-speech-evi/guides/phone-calling) for tips on crafting your system prompt.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.prompts.delete_prompt_version(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
- version=1,
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Prompt. Formatted as a UUID.
-
-
-
-
-
--
-
-**version:** `int`
-
-Version number for a Prompt.
-
-Prompts, Configs, Custom Voices, and Tools are versioned. This versioning system supports iterative development, allowing you to progressively refine prompts and revert to previous versions if needed.
-
-Version numbers are integer values representing different iterations of the Prompt. Each update to the Prompt increments its version number.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.prompts.update_prompt_description(...) -> AsyncHttpResponse[typing.Optional[ReturnPrompt]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Updates the description of a **Prompt**.
-
-See our [prompting guide](/docs/speech-to-speech-evi/guides/phone-calling) for tips on crafting your system prompt.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.prompts.update_prompt_description(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
- version=1,
- version_description="This is an updated version_description.",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Prompt. Formatted as a UUID.
-
-
-
-
-
--
-
-**version:** `int`
-
-Version number for a Prompt.
-
-Prompts, Configs, Custom Voices, and Tools are versioned. This versioning system supports iterative development, allowing you to progressively refine prompts and revert to previous versions if needed.
-
-Version numbers are integer values representing different iterations of the Prompt. Each update to the Prompt increments its version number.
-
-
-
-
-
--
-
-**version_description:** `typing.Optional[str]` — An optional description of the Prompt version.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-## EmpathicVoice Tools
-client.empathic_voice.tools.list_tools(...) -> AsyncPager[typing.Optional[ReturnUserDefinedTool], ReturnPagedUserDefinedTools]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a paginated list of **Tools**.
-
-Refer to our [tool use](/docs/speech-to-speech-evi/features/tool-use#function-calling) guide for comprehensive instructions on defining and integrating tools into EVI.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.empathic_voice.tools.list_tools(
- page_number=0,
- page_size=2,
-)
-for item in response:
- yield item
-# alternatively, you can paginate page-by-page
-for page in response.iter_pages():
- yield page
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**restrict_to_most_recent:** `typing.Optional[bool]` — By default, `restrict_to_most_recent` is set to true, returning only the latest version of each tool. To include all versions of each tool in the list, set `restrict_to_most_recent` to false.
-
-
-
-
-
--
-
-**name:** `typing.Optional[str]` — Filter to only include tools with name.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.tools.create_tool(...) -> AsyncHttpResponse[typing.Optional[ReturnUserDefinedTool]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Creates a **Tool** that can be added to an [EVI configuration](/reference/speech-to-speech-evi/configs/create-config).
-
-Refer to our [tool use](/docs/speech-to-speech-evi/features/tool-use#function-calling) guide for comprehensive instructions on defining and integrating tools into EVI.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.tools.create_tool(
- name="get_current_weather",
- parameters='{ "type": "object", "properties": { "location": { "type": "string", "description": "The city and state, e.g. San Francisco, CA" }, "format": { "type": "string", "enum": ["celsius", "fahrenheit"], "description": "The temperature unit to use. Infer this from the users location." } }, "required": ["location", "format"] }',
- version_description="Fetches current weather and uses celsius or fahrenheit based on location of user.",
- description="This tool is for getting the current weather.",
- fallback_content="Unable to fetch current weather.",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**name:** `str`
-
-Name of the built-in tool to use. Hume supports the following built-in tools:
-
-- **web_search:** enables EVI to search the web for up-to-date information when applicable.
-- **hang_up:** closes the WebSocket connection when appropriate (e.g., after detecting a farewell in the conversation).
-
-For more information, see our guide on [using built-in tools](/docs/speech-to-speech-evi/features/tool-use#using-built-in-tools).
-
-
-
-
-
--
-
-**parameters:** `str`
-
-Stringified JSON defining the parameters used by this version of the Tool.
-
-These parameters define the inputs needed for the Tool's execution, including the expected data type and description for each input field. Structured as a stringified JSON schema, this format ensures the Tool receives data in the expected format.
-
-
-
-
-
--
-
-**description:** `typing.Optional[str]` — An optional description of what the Tool does, used by the supplemental LLM to choose when and how to call the function.
-
-
-
-
-
--
-
-**fallback_content:** `typing.Optional[str]` — Optional text passed to the supplemental LLM in place of the tool call result. The LLM then uses this text to generate a response back to the user, ensuring continuity in the conversation if the Tool errors.
-
-
-
-
-
--
-
-**version_description:** `typing.Optional[str]` — An optional description of the Tool version.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.tools.list_tool_versions(...) -> AsyncPager[typing.Optional[ReturnUserDefinedTool], ReturnPagedUserDefinedTools]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a list of a **Tool's** versions.
-
-Refer to our [tool use](/docs/speech-to-speech-evi/features/tool-use#function-calling) guide for comprehensive instructions on defining and integrating tools into EVI.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-response = client.empathic_voice.tools.list_tool_versions(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
-)
-for item in response:
- yield item
-# alternatively, you can paginate page-by-page
-for page in response.iter_pages():
- yield page
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str`
-
-
-
-
-
--
-
-**page_number:** `typing.Optional[int]`
-
-Specifies the page number to retrieve, enabling pagination.
-
-This parameter uses zero-based indexing. For example, setting `page_number` to 0 retrieves the first page of results (items 0-9 if `page_size` is 10), setting `page_number` to 1 retrieves the second page (items 10-19), and so on. Defaults to 0, which retrieves the first page.
-
-
-
-
-
--
-
-**page_size:** `typing.Optional[int]`
-
-Specifies the maximum number of results to include per page, enabling pagination. The value must be between 1 and 100, inclusive.
-
-For example, if `page_size` is set to 10, each page will include up to 10 items. Defaults to 10.
-
-
-
-
-
--
-
-**restrict_to_most_recent:** `typing.Optional[bool]` — By default, `restrict_to_most_recent` is set to true, returning only the latest version of each tool. To include all versions of each tool in the list, set `restrict_to_most_recent` to false.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.tools.create_tool_version(...) -> AsyncHttpResponse[typing.Optional[ReturnUserDefinedTool]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Updates a **Tool** by creating a new version of the **Tool**.
-
-Refer to our [tool use](/docs/speech-to-speech-evi/features/tool-use#function-calling) guide for comprehensive instructions on defining and integrating tools into EVI.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.tools.create_tool_version(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
- parameters='{ "type": "object", "properties": { "location": { "type": "string", "description": "The city and state, e.g. San Francisco, CA" }, "format": { "type": "string", "enum": ["celsius", "fahrenheit", "kelvin"], "description": "The temperature unit to use. Infer this from the users location." } }, "required": ["location", "format"] }',
- version_description="Fetches current weather and uses celsius, fahrenheit, or kelvin based on location of user.",
- fallback_content="Unable to fetch current weather.",
- description="This tool is for getting the current weather.",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Tool. Formatted as a UUID.
-
-
-
-
-
--
-
-**parameters:** `str`
-
-Stringified JSON defining the parameters used by this version of the Tool.
-
-These parameters define the inputs needed for the Tool's execution, including the expected data type and description for each input field. Structured as a stringified JSON schema, this format ensures the Tool receives data in the expected format.
-
-
-
-
-
--
-
-**description:** `typing.Optional[str]` — An optional description of what the Tool does, used by the supplemental LLM to choose when and how to call the function.
-
-
-
-
-
--
-
-**fallback_content:** `typing.Optional[str]` — Optional text passed to the supplemental LLM in place of the tool call result. The LLM then uses this text to generate a response back to the user, ensuring continuity in the conversation if the Tool errors.
-
-
-
-
-
--
-
-**version_description:** `typing.Optional[str]` — An optional description of the Tool version.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.tools.delete_tool(...) -> AsyncHttpResponse[None]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Deletes a **Tool** and its versions.
-
-Refer to our [tool use](/docs/speech-to-speech-evi/features/tool-use#function-calling) guide for comprehensive instructions on defining and integrating tools into EVI.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.tools.delete_tool(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Tool. Formatted as a UUID.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.tools.update_tool_name(...) -> AsyncHttpResponse[str]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Updates the name of a **Tool**.
-
-Refer to our [tool use](/docs/speech-to-speech-evi/features/tool-use#function-calling) guide for comprehensive instructions on defining and integrating tools into EVI.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.tools.update_tool_name(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
- name="get_current_temperature",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Tool. Formatted as a UUID.
-
-
-
-
-
--
-
-**name:** `str`
-
-Name of the built-in tool to use. Hume supports the following built-in tools:
-
-- **web_search:** enables EVI to search the web for up-to-date information when applicable.
-- **hang_up:** closes the WebSocket connection when appropriate (e.g., after detecting a farewell in the conversation).
-
-For more information, see our guide on [using built-in tools](/docs/speech-to-speech-evi/features/tool-use#using-built-in-tools).
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.tools.get_tool_version(...) -> AsyncHttpResponse[typing.Optional[ReturnUserDefinedTool]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Fetches a specified version of a **Tool**.
-
-Refer to our [tool use](/docs/speech-to-speech-evi/features/tool-use#function-calling) guide for comprehensive instructions on defining and integrating tools into EVI.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.tools.get_tool_version(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
- version=1,
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Tool. Formatted as a UUID.
-
-
-
-
-
--
-
-**version:** `int`
-
-Version number for a Tool.
-
-Tools, Configs, Custom Voices, and Prompts are versioned. This versioning system supports iterative development, allowing you to progressively refine tools and revert to previous versions if needed.
-
-Version numbers are integer values representing different iterations of the Tool. Each update to the Tool increments its version number.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.tools.delete_tool_version(...) -> AsyncHttpResponse[None]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Deletes a specified version of a **Tool**.
-
-Refer to our [tool use](/docs/speech-to-speech-evi/features/tool-use#function-calling) guide for comprehensive instructions on defining and integrating tools into EVI.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.tools.delete_tool_version(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
- version=1,
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Tool. Formatted as a UUID.
-
-
-
-
-
--
-
-**version:** `int`
-
-Version number for a Tool.
-
-Tools, Configs, Custom Voices, and Prompts are versioned. This versioning system supports iterative development, allowing you to progressively refine tools and revert to previous versions if needed.
-
-Version numbers are integer values representing different iterations of the Tool. Each update to the Tool increments its version number.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.empathic_voice.tools.update_tool_description(...) -> AsyncHttpResponse[typing.Optional[ReturnUserDefinedTool]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Updates the description of a specified **Tool** version.
-
-Refer to our [tool use](/docs/speech-to-speech-evi/features/tool-use#function-calling) guide for comprehensive instructions on defining and integrating tools into EVI.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.empathic_voice.tools.update_tool_description(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
- version=1,
- version_description="Fetches current temperature, precipitation, wind speed, AQI, and other weather conditions. Uses Celsius, Fahrenheit, or kelvin depending on user's region.",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — Identifier for a Tool. Formatted as a UUID.
-
-
-
-
-
--
-
-**version:** `int`
-
-Version number for a Tool.
-
-Tools, Configs, Custom Voices, and Prompts are versioned. This versioning system supports iterative development, allowing you to progressively refine tools and revert to previous versions if needed.
-
-Version numbers are integer values representing different iterations of the Tool. Each update to the Tool increments its version number.
-
-
-
-
-
--
-
-**version_description:** `typing.Optional[str]` — An optional description of the Tool version.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-## ExpressionMeasurement Batch
-client.expression_measurement.batch.list_jobs(...) -> AsyncHttpResponse[typing.List[UnionJob]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Sort and filter jobs.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.expression_measurement.batch.list_jobs()
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**limit:** `typing.Optional[int]` — The maximum number of jobs to include in the response.
-
-
-
-
-
--
-
-**status:** `typing.Optional[typing.Union[Status, typing.Sequence[Status]]]`
-
-Include only jobs of this status in the response. There are four possible statuses:
-
-- `QUEUED`: The job has been received and is waiting to be processed.
-
-- `IN_PROGRESS`: The job is currently being processed.
-
-- `COMPLETED`: The job has finished processing.
-
-- `FAILED`: The job encountered an error and could not be completed successfully.
-
-
-
-
-
--
-
-**when:** `typing.Optional[When]` — Specify whether to include jobs created before or after a given `timestamp_ms`.
-
-
-
-
-
--
-
-**timestamp_ms:** `typing.Optional[int]`
-
-Provide a timestamp in milliseconds to filter jobs.
-
- When combined with the `when` parameter, you can filter jobs before or after the given timestamp. Defaults to the current Unix timestamp if one is not provided.
-
-
-
-
-
--
-
-**sort_by:** `typing.Optional[SortBy]`
-
-Specify which timestamp to sort the jobs by.
-
-- `created`: Sort jobs by the time of creation, indicated by `created_timestamp_ms`.
-
-- `started`: Sort jobs by the time processing started, indicated by `started_timestamp_ms`.
-
-- `ended`: Sort jobs by the time processing ended, indicated by `ended_timestamp_ms`.
-
-
-
-
-
--
-
-**direction:** `typing.Optional[Direction]`
-
-Specify the order in which to sort the jobs. Defaults to descending order.
-
-- `asc`: Sort in ascending order (chronological, with the oldest records first).
-
-- `desc`: Sort in descending order (reverse-chronological, with the newest records first).
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.expression_measurement.batch.start_inference_job(...) -> AsyncHttpResponse[str]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Start a new measurement inference job.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.expression_measurement.batch.start_inference_job(
- urls=["https://hume-tutorials.s3.amazonaws.com/faces.zip"],
- notify=True,
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**models:** `typing.Optional[Models]`
-
-Specify the models to use for inference.
-
-If this field is not explicitly set, then all models will run by default.
-
-
-
-
-
--
-
-**transcription:** `typing.Optional[Transcription]`
-
-
-
-
-
--
-
-**urls:** `typing.Optional[typing.Sequence[str]]`
-
-URLs to the media files to be processed. Each must be a valid public URL to a media file (see recommended input filetypes) or an archive (`.zip`, `.tar.gz`, `.tar.bz2`, `.tar.xz`) of media files.
-
-If you wish to supply more than 100 URLs, consider providing them as an archive (`.zip`, `.tar.gz`, `.tar.bz2`, `.tar.xz`).
-
-
-
-
-
--
-
-**text:** `typing.Optional[typing.Sequence[str]]` — Text supplied directly to our Emotional Language and NER models for analysis.
-
-
-
-
-
--
-
-**callback_url:** `typing.Optional[str]` — If provided, a `POST` request will be made to the URL with the generated predictions on completion or the error message on failure.
-
-
-
-
-
--
-
-**notify:** `typing.Optional[bool]` — Whether to send an email notification to the user upon job completion/failure.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.expression_measurement.batch.get_job_details(...) -> AsyncHttpResponse[UnionJob]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Get the request details and state of a given job.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.expression_measurement.batch.get_job_details(
- id="job_id",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — The unique identifier for the job.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.expression_measurement.batch.get_job_predictions(...) -> AsyncHttpResponse[typing.List[UnionPredictResult]]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Get the JSON predictions of a completed inference job.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.expression_measurement.batch.get_job_predictions(
- id="job_id",
-)
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**id:** `str` — The unique identifier for the job.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
-client.expression_measurement.batch.start_inference_job_from_local_file(...) -> AsyncHttpResponse[str]
-
--
-
-#### 📝 Description
-
-
--
-
-
--
-
-Start a new batch inference job.
-
-
-
-
-
-#### 🔌 Usage
-
-
--
-
-
--
-
-```python
-from hume import HumeClient
-
-client = HumeClient(
- api_key="YOUR_API_KEY",
-)
-client.expression_measurement.batch.start_inference_job_from_local_file()
-
-```
-
-
-
-
-
-#### ⚙️ Parameters
-
-
--
-
-
--
-
-**file:** `from __future__ import annotations
-
-typing.List[core.File]` — See core.File for more documentation
-
-
-
-
-
--
-
-**json:** `typing.Optional[InferenceBaseRequest]` — Stringified JSON object containing the inference job configuration.
-
-
-
-
-
--
-
-**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/hume/base_client.py b/src/hume/base_client.py
index 0feb2d67..26c296bc 100644
--- a/src/hume/base_client.py
+++ b/src/hume/base_client.py
@@ -6,6 +6,7 @@
import httpx
from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
+from .core.logging import LogConfig, Logger
from .environment import HumeClientEnvironment
if typing.TYPE_CHECKING:
@@ -42,6 +43,9 @@ class BaseHumeClient:
httpx_client : typing.Optional[httpx.Client]
The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration.
+ logging : typing.Optional[typing.Union[LogConfig, Logger]]
+ Configure logging for the SDK. Accepts a LogConfig dict with 'level' (debug/info/warn/error), 'logger' (custom logger implementation), and 'silent' (boolean, defaults to True) fields. You can also pass a pre-configured Logger instance.
+
Examples
--------
from hume import HumeClient
@@ -60,6 +64,7 @@ def __init__(
timeout: typing.Optional[float] = None,
follow_redirects: typing.Optional[bool] = True,
httpx_client: typing.Optional[httpx.Client] = None,
+ logging: typing.Optional[typing.Union[LogConfig, Logger]] = None,
):
_defaulted_timeout = (
timeout if timeout is not None else 60 if httpx_client is None else httpx_client.timeout.read
@@ -74,19 +79,12 @@ def __init__(
if follow_redirects is not None
else httpx.Client(timeout=_defaulted_timeout),
timeout=_defaulted_timeout,
+ logging=logging,
)
- self._tts: typing.Optional[TtsClient] = None
self._empathic_voice: typing.Optional[EmpathicVoiceClient] = None
+ self._tts: typing.Optional[TtsClient] = None
self._expression_measurement: typing.Optional[ExpressionMeasurementClient] = None
- @property
- def tts(self):
- if self._tts is None:
- from .tts.client import TtsClient # noqa: E402
-
- self._tts = TtsClient(client_wrapper=self._client_wrapper)
- return self._tts
-
@property
def empathic_voice(self):
if self._empathic_voice is None:
@@ -95,6 +93,14 @@ def empathic_voice(self):
self._empathic_voice = EmpathicVoiceClient(client_wrapper=self._client_wrapper)
return self._empathic_voice
+ @property
+ def tts(self):
+ if self._tts is None:
+ from .tts.client import TtsClient # noqa: E402
+
+ self._tts = TtsClient(client_wrapper=self._client_wrapper)
+ return self._tts
+
@property
def expression_measurement(self):
if self._expression_measurement is None:
@@ -132,6 +138,9 @@ class AsyncBaseHumeClient:
httpx_client : typing.Optional[httpx.AsyncClient]
The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration.
+ logging : typing.Optional[typing.Union[LogConfig, Logger]]
+ Configure logging for the SDK. Accepts a LogConfig dict with 'level' (debug/info/warn/error), 'logger' (custom logger implementation), and 'silent' (boolean, defaults to True) fields. You can also pass a pre-configured Logger instance.
+
Examples
--------
from hume import AsyncHumeClient
@@ -150,6 +159,7 @@ def __init__(
timeout: typing.Optional[float] = None,
follow_redirects: typing.Optional[bool] = True,
httpx_client: typing.Optional[httpx.AsyncClient] = None,
+ logging: typing.Optional[typing.Union[LogConfig, Logger]] = None,
):
_defaulted_timeout = (
timeout if timeout is not None else 60 if httpx_client is None else httpx_client.timeout.read
@@ -164,19 +174,12 @@ def __init__(
if follow_redirects is not None
else httpx.AsyncClient(timeout=_defaulted_timeout),
timeout=_defaulted_timeout,
+ logging=logging,
)
- self._tts: typing.Optional[AsyncTtsClient] = None
self._empathic_voice: typing.Optional[AsyncEmpathicVoiceClient] = None
+ self._tts: typing.Optional[AsyncTtsClient] = None
self._expression_measurement: typing.Optional[AsyncExpressionMeasurementClient] = None
- @property
- def tts(self):
- if self._tts is None:
- from .tts.client import AsyncTtsClient # noqa: E402
-
- self._tts = AsyncTtsClient(client_wrapper=self._client_wrapper)
- return self._tts
-
@property
def empathic_voice(self):
if self._empathic_voice is None:
@@ -185,6 +188,14 @@ def empathic_voice(self):
self._empathic_voice = AsyncEmpathicVoiceClient(client_wrapper=self._client_wrapper)
return self._empathic_voice
+ @property
+ def tts(self):
+ if self._tts is None:
+ from .tts.client import AsyncTtsClient # noqa: E402
+
+ self._tts = AsyncTtsClient(client_wrapper=self._client_wrapper)
+ return self._tts
+
@property
def expression_measurement(self):
if self._expression_measurement is None:
diff --git a/src/hume/core/__init__.py b/src/hume/core/__init__.py
index 466072ed..b02c161a 100644
--- a/src/hume/core/__init__.py
+++ b/src/hume/core/__init__.py
@@ -8,13 +8,13 @@
if typing.TYPE_CHECKING:
from .api_error import ApiError
from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper
- from .custom_pagination import AsyncCustomPager, SyncCustomPager
from .datetime_utils import serialize_datetime
from .events import EventEmitterMixin, EventType
from .file import File, convert_file_dict_to_httpx_tuples, with_content_type
from .http_client import AsyncHttpClient, HttpClient
from .http_response import AsyncHttpResponse, HttpResponse
from .jsonable_encoder import jsonable_encoder
+ from .logging import ConsoleLogger, ILogger, LogConfig, LogLevel, Logger, create_logger
from .pagination import AsyncPager, SyncPager
from .pydantic_utilities import (
IS_PYDANTIC_V2,
@@ -33,27 +33,31 @@
_dynamic_imports: typing.Dict[str, str] = {
"ApiError": ".api_error",
"AsyncClientWrapper": ".client_wrapper",
- "AsyncCustomPager": ".custom_pagination",
"AsyncHttpClient": ".http_client",
"AsyncHttpResponse": ".http_response",
"AsyncPager": ".pagination",
"BaseClientWrapper": ".client_wrapper",
+ "ConsoleLogger": ".logging",
"EventEmitterMixin": ".events",
"EventType": ".events",
"FieldMetadata": ".serialization",
"File": ".file",
"HttpClient": ".http_client",
"HttpResponse": ".http_response",
+ "ILogger": ".logging",
"IS_PYDANTIC_V2": ".pydantic_utilities",
"InvalidWebSocketStatus": ".websocket_compat",
+ "LogConfig": ".logging",
+ "LogLevel": ".logging",
+ "Logger": ".logging",
"RequestOptions": ".request_options",
"SyncClientWrapper": ".client_wrapper",
- "SyncCustomPager": ".custom_pagination",
"SyncPager": ".pagination",
"UniversalBaseModel": ".pydantic_utilities",
"UniversalRootModel": ".pydantic_utilities",
"convert_and_respect_annotation_metadata": ".serialization",
"convert_file_dict_to_httpx_tuples": ".file",
+ "create_logger": ".logging",
"encode_query": ".query_encoder",
"get_status_code": ".websocket_compat",
"jsonable_encoder": ".jsonable_encoder",
@@ -91,27 +95,31 @@ def __dir__():
__all__ = [
"ApiError",
"AsyncClientWrapper",
- "AsyncCustomPager",
"AsyncHttpClient",
"AsyncHttpResponse",
"AsyncPager",
"BaseClientWrapper",
+ "ConsoleLogger",
"EventEmitterMixin",
"EventType",
"FieldMetadata",
"File",
"HttpClient",
"HttpResponse",
+ "ILogger",
"IS_PYDANTIC_V2",
"InvalidWebSocketStatus",
+ "LogConfig",
+ "LogLevel",
+ "Logger",
"RequestOptions",
"SyncClientWrapper",
- "SyncCustomPager",
"SyncPager",
"UniversalBaseModel",
"UniversalRootModel",
"convert_and_respect_annotation_metadata",
"convert_file_dict_to_httpx_tuples",
+ "create_logger",
"encode_query",
"get_status_code",
"jsonable_encoder",
diff --git a/src/hume/core/client_wrapper.py b/src/hume/core/client_wrapper.py
index 42d9d486..5415e8ff 100644
--- a/src/hume/core/client_wrapper.py
+++ b/src/hume/core/client_wrapper.py
@@ -5,6 +5,7 @@
import httpx
from ..environment import HumeClientEnvironment
from .http_client import AsyncHttpClient, HttpClient
+from .logging import LogConfig, Logger
class BaseClientWrapper:
@@ -15,22 +16,24 @@ def __init__(
headers: typing.Optional[typing.Dict[str, str]] = None,
environment: HumeClientEnvironment,
timeout: typing.Optional[float] = None,
+ logging: typing.Optional[typing.Union[LogConfig, Logger]] = None,
):
self.api_key = api_key
self._headers = headers
self._environment = environment
self._timeout = timeout
+ self._logging = logging
def get_headers(self) -> typing.Dict[str, str]:
import platform
headers: typing.Dict[str, str] = {
- "User-Agent": "hume/0.13.8",
+ "User-Agent": "hume/0.13.9",
"X-Fern-Language": "Python",
"X-Fern-Runtime": f"python/{platform.python_version()}",
"X-Fern-Platform": f"{platform.system().lower()}/{platform.release()}",
"X-Fern-SDK-Name": "hume",
- "X-Fern-SDK-Version": "0.13.8",
+ "X-Fern-SDK-Version": "0.13.9",
**(self.get_custom_headers() or {}),
}
if self.api_key is not None:
@@ -55,11 +58,15 @@ def __init__(
headers: typing.Optional[typing.Dict[str, str]] = None,
environment: HumeClientEnvironment,
timeout: typing.Optional[float] = None,
+ logging: typing.Optional[typing.Union[LogConfig, Logger]] = None,
httpx_client: httpx.Client,
):
- super().__init__(api_key=api_key, headers=headers, environment=environment, timeout=timeout)
+ super().__init__(api_key=api_key, headers=headers, environment=environment, timeout=timeout, logging=logging)
self.httpx_client = HttpClient(
- httpx_client=httpx_client, base_headers=self.get_headers, base_timeout=self.get_timeout
+ httpx_client=httpx_client,
+ base_headers=self.get_headers,
+ base_timeout=self.get_timeout,
+ logging_config=self._logging,
)
@@ -71,16 +78,18 @@ def __init__(
headers: typing.Optional[typing.Dict[str, str]] = None,
environment: HumeClientEnvironment,
timeout: typing.Optional[float] = None,
+ logging: typing.Optional[typing.Union[LogConfig, Logger]] = None,
async_token: typing.Optional[typing.Callable[[], typing.Awaitable[str]]] = None,
httpx_client: httpx.AsyncClient,
):
- super().__init__(api_key=api_key, headers=headers, environment=environment, timeout=timeout)
+ super().__init__(api_key=api_key, headers=headers, environment=environment, timeout=timeout, logging=logging)
self._async_token = async_token
self.httpx_client = AsyncHttpClient(
httpx_client=httpx_client,
base_headers=self.get_headers,
base_timeout=self.get_timeout,
async_base_headers=self.async_get_headers,
+ logging_config=self._logging,
)
async def async_get_headers(self) -> typing.Dict[str, str]:
diff --git a/src/hume/core/custom_pagination.py b/src/hume/core/custom_pagination.py
deleted file mode 100644
index 5de2c7a8..00000000
--- a/src/hume/core/custom_pagination.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-"""
-Custom Pagination Support
-
-This file is designed to be modified by SDK users to implement their own
-pagination logic. The generator will import SyncCustomPager and AsyncCustomPager
-from this module when custom pagination is used.
-
-Users should:
-1. Implement their custom pager (e.g., PayrocPager, MyCustomPager, etc.)
-2. Create adapter classes (SyncCustomPager/AsyncCustomPager) that bridge
- between the generated SDK code and their custom pager implementation
-"""
-
-from __future__ import annotations
-
-from typing import Any, AsyncIterator, Generic, Iterator, TypeVar
-
-# Import the base utilities you'll need
-# Adjust these imports based on your actual structure
-try:
- from .client_wrapper import AsyncClientWrapper, SyncClientWrapper
-except ImportError:
- # Fallback for type hints
- AsyncClientWrapper = Any # type: ignore
- SyncClientWrapper = Any # type: ignore
-
-TItem = TypeVar("TItem")
-TResponse = TypeVar("TResponse")
-
-
-class SyncCustomPager(Generic[TItem, TResponse]):
- """
- Adapter for custom synchronous pagination.
-
- The generator will call this with:
- SyncCustomPager(initial_response=response, client_wrapper=client_wrapper)
-
- Implement this class to extract pagination metadata from your response
- and delegate to your custom pager implementation.
-
- Example implementation:
-
- class SyncCustomPager(Generic[TItem, TResponse]):
- def __init__(
- self,
- *,
- initial_response: TResponse,
- client_wrapper: SyncClientWrapper,
- ):
- # Extract data and pagination metadata from response
- data = initial_response.data # Adjust based on your response structure
- links = initial_response.links
-
- # Initialize your custom pager
- self._pager = MyCustomPager(
- current_page=Page(data),
- httpx_client=client_wrapper.httpx_client,
- get_headers=client_wrapper.get_headers,
- # ... other parameters
- )
-
- def __iter__(self):
- return iter(self._pager)
-
- # Delegate other methods to your pager...
- """
-
- def __init__(
- self,
- *,
- initial_response: TResponse,
- client_wrapper: SyncClientWrapper,
- ):
- """
- Initialize the custom pager.
-
- Args:
- initial_response: The parsed API response from the first request
- client_wrapper: The client wrapper providing HTTP client and utilities
- """
- raise NotImplementedError(
- "SyncCustomPager must be implemented. "
- "Please implement this class in core/custom_pagination.py to define your pagination logic. "
- "See the class docstring for examples."
- )
-
- def __iter__(self) -> Iterator[TItem]:
- """Iterate through all items across all pages."""
- raise NotImplementedError("Must implement __iter__ method")
-
-
-class AsyncCustomPager(Generic[TItem, TResponse]):
- """
- Adapter for custom asynchronous pagination.
-
- The generator will call this with:
- AsyncCustomPager(initial_response=response, client_wrapper=client_wrapper)
-
- Implement this class to extract pagination metadata from your response
- and delegate to your custom async pager implementation.
-
- Example implementation:
-
- class AsyncCustomPager(Generic[TItem, TResponse]):
- def __init__(
- self,
- *,
- initial_response: TResponse,
- client_wrapper: AsyncClientWrapper,
- ):
- # Extract data and pagination metadata from response
- data = initial_response.data # Adjust based on your response structure
- links = initial_response.links
-
- # Initialize your custom async pager
- self._pager = MyAsyncCustomPager(
- current_page=Page(data),
- httpx_client=client_wrapper.httpx_client,
- get_headers=client_wrapper.get_headers,
- # ... other parameters
- )
-
- async def __aiter__(self):
- return self._pager.__aiter__()
-
- # Delegate other methods to your pager...
- """
-
- def __init__(
- self,
- *,
- initial_response: TResponse,
- client_wrapper: AsyncClientWrapper,
- ):
- """
- Initialize the custom async pager.
-
- Args:
- initial_response: The parsed API response from the first request
- client_wrapper: The client wrapper providing HTTP client and utilities
- """
- raise NotImplementedError(
- "AsyncCustomPager must be implemented. "
- "Please implement this class in core/custom_pagination.py to define your pagination logic. "
- "See the class docstring for examples."
- )
-
- async def __aiter__(self) -> AsyncIterator[TItem]:
- """Asynchronously iterate through all items across all pages."""
- raise NotImplementedError("Must implement __aiter__ method")
diff --git a/src/hume/core/http_client.py b/src/hume/core/http_client.py
index 7c6c936f..ee937589 100644
--- a/src/hume/core/http_client.py
+++ b/src/hume/core/http_client.py
@@ -12,6 +12,7 @@
from .file import File, convert_file_dict_to_httpx_tuples
from .force_multipart import FORCE_MULTIPART
from .jsonable_encoder import jsonable_encoder
+from .logging import LogConfig, Logger, create_logger
from .query_encoder import encode_query
from .remove_none_from_dict import remove_none_from_dict as remove_none_from_dict
from .request_options import RequestOptions
@@ -122,6 +123,32 @@ def _should_retry(response: httpx.Response) -> bool:
return response.status_code >= 500 or response.status_code in retryable_400s
+_SENSITIVE_HEADERS = frozenset(
+ {
+ "authorization",
+ "www-authenticate",
+ "x-api-key",
+ "api-key",
+ "apikey",
+ "x-api-token",
+ "x-auth-token",
+ "auth-token",
+ "cookie",
+ "set-cookie",
+ "proxy-authorization",
+ "proxy-authenticate",
+ "x-csrf-token",
+ "x-xsrf-token",
+ "x-session-token",
+ "x-access-token",
+ }
+)
+
+
+def _redact_headers(headers: typing.Dict[str, str]) -> typing.Dict[str, str]:
+ return {k: ("[REDACTED]" if k.lower() in _SENSITIVE_HEADERS else v) for k, v in headers.items()}
+
+
def _build_url(base_url: str, path: typing.Optional[str]) -> str:
"""
Build a full URL by joining a base URL with a path.
@@ -238,11 +265,13 @@ def __init__(
base_timeout: typing.Callable[[], typing.Optional[float]],
base_headers: typing.Callable[[], typing.Dict[str, str]],
base_url: typing.Optional[typing.Callable[[], str]] = None,
+ logging_config: typing.Optional[typing.Union[LogConfig, Logger]] = None,
):
self.base_url = base_url
self.base_timeout = base_timeout
self.base_headers = base_headers
self.httpx_client = httpx_client
+ self.logger = create_logger(logging_config)
def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
base_url = maybe_base_url
@@ -315,18 +344,30 @@ def request(
)
)
+ _request_url = _build_url(base_url, path)
+ _request_headers = jsonable_encoder(
+ remove_none_from_dict(
+ {
+ **self.base_headers(),
+ **(headers if headers is not None else {}),
+ **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
+ }
+ )
+ )
+
+ if self.logger.is_debug():
+ self.logger.debug(
+ "Making HTTP request",
+ method=method,
+ url=_request_url,
+ headers=_redact_headers(_request_headers),
+ has_body=json_body is not None or data_body is not None,
+ )
+
response = self.httpx_client.request(
method=method,
- url=_build_url(base_url, path),
- headers=jsonable_encoder(
- remove_none_from_dict(
- {
- **self.base_headers(),
- **(headers if headers is not None else {}),
- **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
- }
- )
- ),
+ url=_request_url,
+ headers=_request_headers,
params=_encoded_params if _encoded_params else None,
json=json_body,
data=data_body,
@@ -353,6 +394,24 @@ def request(
omit=omit,
)
+ if self.logger.is_debug():
+ if 200 <= response.status_code < 400:
+ self.logger.debug(
+ "HTTP request succeeded",
+ method=method,
+ url=_request_url,
+ status_code=response.status_code,
+ )
+
+ if self.logger.is_error():
+ if response.status_code >= 400:
+ self.logger.error(
+ "HTTP request failed with error status",
+ method=method,
+ url=_request_url,
+ status_code=response.status_code,
+ )
+
return response
@contextmanager
@@ -418,18 +477,29 @@ def stream(
)
)
+ _request_url = _build_url(base_url, path)
+ _request_headers = jsonable_encoder(
+ remove_none_from_dict(
+ {
+ **self.base_headers(),
+ **(headers if headers is not None else {}),
+                    **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
+ }
+ )
+ )
+
+ if self.logger.is_debug():
+ self.logger.debug(
+ "Making streaming HTTP request",
+ method=method,
+ url=_request_url,
+ headers=_redact_headers(_request_headers),
+ )
+
with self.httpx_client.stream(
method=method,
- url=_build_url(base_url, path),
- headers=jsonable_encoder(
- remove_none_from_dict(
- {
- **self.base_headers(),
- **(headers if headers is not None else {}),
- **(request_options.get("additional_headers", {}) if request_options is not None else {}),
- }
- )
- ),
+ url=_request_url,
+ headers=_request_headers,
params=_encoded_params if _encoded_params else None,
json=json_body,
data=data_body,
@@ -449,12 +519,14 @@ def __init__(
base_headers: typing.Callable[[], typing.Dict[str, str]],
base_url: typing.Optional[typing.Callable[[], str]] = None,
async_base_headers: typing.Optional[typing.Callable[[], typing.Awaitable[typing.Dict[str, str]]]] = None,
+ logging_config: typing.Optional[typing.Union[LogConfig, Logger]] = None,
):
self.base_url = base_url
self.base_timeout = base_timeout
self.base_headers = base_headers
self.async_base_headers = async_base_headers
self.httpx_client = httpx_client
+ self.logger = create_logger(logging_config)
async def _get_headers(self) -> typing.Dict[str, str]:
if self.async_base_headers is not None:
@@ -535,19 +607,30 @@ async def request(
)
)
- # Add the input to each of these and do None-safety checks
+ _request_url = _build_url(base_url, path)
+ _request_headers = jsonable_encoder(
+ remove_none_from_dict(
+ {
+ **_headers,
+ **(headers if headers is not None else {}),
+ **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
+ }
+ )
+ )
+
+ if self.logger.is_debug():
+ self.logger.debug(
+ "Making HTTP request",
+ method=method,
+ url=_request_url,
+ headers=_redact_headers(_request_headers),
+ has_body=json_body is not None or data_body is not None,
+ )
+
response = await self.httpx_client.request(
method=method,
- url=_build_url(base_url, path),
- headers=jsonable_encoder(
- remove_none_from_dict(
- {
- **_headers,
- **(headers if headers is not None else {}),
- **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
- }
- )
- ),
+ url=_request_url,
+ headers=_request_headers,
params=_encoded_params if _encoded_params else None,
json=json_body,
data=data_body,
@@ -573,6 +656,25 @@ async def request(
retries=retries + 1,
omit=omit,
)
+
+ if self.logger.is_debug():
+ if 200 <= response.status_code < 400:
+ self.logger.debug(
+ "HTTP request succeeded",
+ method=method,
+ url=_request_url,
+ status_code=response.status_code,
+ )
+
+ if self.logger.is_error():
+ if response.status_code >= 400:
+ self.logger.error(
+ "HTTP request failed with error status",
+ method=method,
+ url=_request_url,
+ status_code=response.status_code,
+ )
+
return response
@asynccontextmanager
@@ -641,18 +743,29 @@ async def stream(
)
)
+ _request_url = _build_url(base_url, path)
+ _request_headers = jsonable_encoder(
+ remove_none_from_dict(
+ {
+ **_headers,
+ **(headers if headers is not None else {}),
+                    **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
+ }
+ )
+ )
+
+ if self.logger.is_debug():
+ self.logger.debug(
+ "Making streaming HTTP request",
+ method=method,
+ url=_request_url,
+ headers=_redact_headers(_request_headers),
+ )
+
async with self.httpx_client.stream(
method=method,
- url=_build_url(base_url, path),
- headers=jsonable_encoder(
- remove_none_from_dict(
- {
- **_headers,
- **(headers if headers is not None else {}),
- **(request_options.get("additional_headers", {}) if request_options is not None else {}),
- }
- )
- ),
+ url=_request_url,
+ headers=_request_headers,
params=_encoded_params if _encoded_params else None,
json=json_body,
data=data_body,
diff --git a/src/hume/core/logging.py b/src/hume/core/logging.py
new file mode 100644
index 00000000..e5e57245
--- /dev/null
+++ b/src/hume/core/logging.py
@@ -0,0 +1,107 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import logging
+import typing
+
+LogLevel = typing.Literal["debug", "info", "warn", "error"]
+
+_LOG_LEVEL_MAP: typing.Dict[LogLevel, int] = {
+ "debug": 1,
+ "info": 2,
+ "warn": 3,
+ "error": 4,
+}
+
+
+class ILogger(typing.Protocol):
+ def debug(self, message: str, **kwargs: typing.Any) -> None: ...
+ def info(self, message: str, **kwargs: typing.Any) -> None: ...
+ def warn(self, message: str, **kwargs: typing.Any) -> None: ...
+ def error(self, message: str, **kwargs: typing.Any) -> None: ...
+
+
+class ConsoleLogger:
+ _logger: logging.Logger
+
+ def __init__(self) -> None:
+ self._logger = logging.getLogger("fern")
+ if not self._logger.handlers:
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter("%(levelname)s - %(message)s"))
+ self._logger.addHandler(handler)
+ self._logger.setLevel(logging.DEBUG)
+
+ def debug(self, message: str, **kwargs: typing.Any) -> None:
+ self._logger.debug(message, extra=kwargs)
+
+ def info(self, message: str, **kwargs: typing.Any) -> None:
+ self._logger.info(message, extra=kwargs)
+
+ def warn(self, message: str, **kwargs: typing.Any) -> None:
+ self._logger.warning(message, extra=kwargs)
+
+ def error(self, message: str, **kwargs: typing.Any) -> None:
+ self._logger.error(message, extra=kwargs)
+
+
+class LogConfig(typing.TypedDict, total=False):
+ level: LogLevel
+ logger: ILogger
+ silent: bool
+
+
+class Logger:
+ _level: int
+ _logger: ILogger
+ _silent: bool
+
+ def __init__(self, *, level: LogLevel, logger: ILogger, silent: bool) -> None:
+ self._level = _LOG_LEVEL_MAP[level]
+ self._logger = logger
+ self._silent = silent
+
+ def _should_log(self, level: LogLevel) -> bool:
+ return not self._silent and self._level <= _LOG_LEVEL_MAP[level]
+
+ def is_debug(self) -> bool:
+ return self._should_log("debug")
+
+ def is_info(self) -> bool:
+ return self._should_log("info")
+
+ def is_warn(self) -> bool:
+ return self._should_log("warn")
+
+ def is_error(self) -> bool:
+ return self._should_log("error")
+
+ def debug(self, message: str, **kwargs: typing.Any) -> None:
+ if self.is_debug():
+ self._logger.debug(message, **kwargs)
+
+ def info(self, message: str, **kwargs: typing.Any) -> None:
+ if self.is_info():
+ self._logger.info(message, **kwargs)
+
+ def warn(self, message: str, **kwargs: typing.Any) -> None:
+ if self.is_warn():
+ self._logger.warn(message, **kwargs)
+
+ def error(self, message: str, **kwargs: typing.Any) -> None:
+ if self.is_error():
+ self._logger.error(message, **kwargs)
+
+
+_default_logger: Logger = Logger(level="info", logger=ConsoleLogger(), silent=True)
+
+
+def create_logger(config: typing.Optional[typing.Union[LogConfig, Logger]] = None) -> Logger:
+ if config is None:
+ return _default_logger
+ if isinstance(config, Logger):
+ return config
+ return Logger(
+ level=config.get("level", "info"),
+ logger=config.get("logger", ConsoleLogger()),
+ silent=config.get("silent", True),
+ )
diff --git a/src/hume/core/pydantic_utilities.py b/src/hume/core/pydantic_utilities.py
index 789081b0..831aadc3 100644
--- a/src/hume/core/pydantic_utilities.py
+++ b/src/hume/core/pydantic_utilities.py
@@ -35,14 +35,31 @@
IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
if IS_PYDANTIC_V2:
- from pydantic.v1.datetime_parse import parse_date as parse_date
- from pydantic.v1.datetime_parse import parse_datetime as parse_datetime
- from pydantic.v1.fields import ModelField as ModelField
- from pydantic.v1.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[attr-defined]
- from pydantic.v1.typing import get_args as get_args
- from pydantic.v1.typing import get_origin as get_origin
- from pydantic.v1.typing import is_literal_type as is_literal_type
- from pydantic.v1.typing import is_union as is_union
+ import warnings
+
+ _datetime_adapter = pydantic.TypeAdapter(dt.datetime) # type: ignore[attr-defined]
+ _date_adapter = pydantic.TypeAdapter(dt.date) # type: ignore[attr-defined]
+
+ def parse_datetime(value: Any) -> dt.datetime: # type: ignore[misc]
+ if isinstance(value, dt.datetime):
+ return value
+ return _datetime_adapter.validate_python(value)
+
+ def parse_date(value: Any) -> dt.date: # type: ignore[misc]
+ if isinstance(value, dt.datetime):
+ return value.date()
+ if isinstance(value, dt.date):
+ return value
+ return _date_adapter.validate_python(value)
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", UserWarning)
+ from pydantic.v1.fields import ModelField as ModelField
+ from pydantic.v1.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[attr-defined]
+ from pydantic.v1.typing import get_args as get_args
+ from pydantic.v1.typing import get_origin as get_origin
+ from pydantic.v1.typing import is_literal_type as is_literal_type
+ from pydantic.v1.typing import is_union as is_union
else:
from pydantic.datetime_parse import parse_date as parse_date # type: ignore[no-redef]
from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore[no-redef]
diff --git a/src/hume/empathic_voice/__init__.py b/src/hume/empathic_voice/__init__.py
index 89874760..1d24f3b0 100644
--- a/src/hume/empathic_voice/__init__.py
+++ b/src/hume/empathic_voice/__init__.py
@@ -52,6 +52,7 @@
PostedUserDefinedToolSpec,
PostedWebhookEventType,
PostedWebhookSpec,
+ PromptExpansionSpec,
ProsodyInference,
ResumeAssistantMessage,
ReturnBuiltinTool,
@@ -174,6 +175,7 @@
"PostedUserDefinedToolSpec": ".types",
"PostedWebhookEventType": ".types",
"PostedWebhookSpec": ".types",
+ "PromptExpansionSpec": ".types",
"ProsodyInference": ".types",
"PublishEvent": ".chat",
"ResumeAssistantMessage": ".types",
@@ -325,6 +327,7 @@ def __dir__():
"PostedUserDefinedToolSpec",
"PostedWebhookEventType",
"PostedWebhookSpec",
+ "PromptExpansionSpec",
"ProsodyInference",
"PublishEvent",
"ResumeAssistantMessage",
diff --git a/src/hume/empathic_voice/chat_groups/client.py b/src/hume/empathic_voice/chat_groups/client.py
index e59a79a3..13d3ae5b 100644
--- a/src/hume/empathic_voice/chat_groups/client.py
+++ b/src/hume/empathic_voice/chat_groups/client.py
@@ -80,7 +80,7 @@ def list_chat_groups(
page_number=0,
page_size=1,
ascending_order=True,
- config_id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ config_id="your-config-id",
)
for item in response:
yield item
@@ -146,7 +146,7 @@ def get_chat_group(
api_key="YOUR_API_KEY",
)
client.empathic_voice.chat_groups.get_chat_group(
- id="697056f0-6c7e-487d-9bd8-9c19df79f05f",
+ id="your-chat-group-id",
page_number=0,
page_size=1,
ascending_order=True,
@@ -208,7 +208,7 @@ def get_audio(
api_key="YOUR_API_KEY",
)
client.empathic_voice.chat_groups.get_audio(
- id="369846cf-6ad5-404d-905e-a8acb5cdfc78",
+ id="your-chat-group-id",
page_number=0,
page_size=10,
ascending_order=True,
@@ -269,7 +269,7 @@ def list_chat_group_events(
api_key="YOUR_API_KEY",
)
response = client.empathic_voice.chat_groups.list_chat_group_events(
- id="697056f0-6c7e-487d-9bd8-9c19df79f05f",
+ id="your-chat-group-id",
page_number=0,
page_size=3,
ascending_order=True,
@@ -360,7 +360,7 @@ async def main() -> None:
page_number=0,
page_size=1,
ascending_order=True,
- config_id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ config_id="your-config-id",
)
async for item in response:
yield item
@@ -435,7 +435,7 @@ async def get_chat_group(
async def main() -> None:
await client.empathic_voice.chat_groups.get_chat_group(
- id="697056f0-6c7e-487d-9bd8-9c19df79f05f",
+ id="your-chat-group-id",
page_number=0,
page_size=1,
ascending_order=True,
@@ -505,7 +505,7 @@ async def get_audio(
async def main() -> None:
await client.empathic_voice.chat_groups.get_audio(
- id="369846cf-6ad5-404d-905e-a8acb5cdfc78",
+ id="your-chat-group-id",
page_number=0,
page_size=10,
ascending_order=True,
@@ -574,7 +574,7 @@ async def list_chat_group_events(
async def main() -> None:
response = await client.empathic_voice.chat_groups.list_chat_group_events(
- id="697056f0-6c7e-487d-9bd8-9c19df79f05f",
+ id="your-chat-group-id",
page_number=0,
page_size=3,
ascending_order=True,
diff --git a/src/hume/empathic_voice/chats/client.py b/src/hume/empathic_voice/chats/client.py
index 77043522..c170aa5e 100644
--- a/src/hume/empathic_voice/chats/client.py
+++ b/src/hume/empathic_voice/chats/client.py
@@ -143,7 +143,7 @@ def list_chat_events(
api_key="YOUR_API_KEY",
)
response = client.empathic_voice.chats.list_chat_events(
- id="470a49f6-1dec-4afe-8b61-035d3b2d63b0",
+ id="your-chat-id",
page_number=0,
page_size=3,
ascending_order=True,
@@ -189,7 +189,7 @@ def get_audio(
api_key="YOUR_API_KEY",
)
client.empathic_voice.chats.get_audio(
- id="470a49f6-1dec-4afe-8b61-035d3b2d63b0",
+ id="your-chat-id",
)
"""
_response = self._raw_client.get_audio(id, request_options=request_options)
@@ -340,7 +340,7 @@ async def list_chat_events(
async def main() -> None:
response = await client.empathic_voice.chats.list_chat_events(
- id="470a49f6-1dec-4afe-8b61-035d3b2d63b0",
+ id="your-chat-id",
page_number=0,
page_size=3,
ascending_order=True,
@@ -395,7 +395,7 @@ async def get_audio(
async def main() -> None:
await client.empathic_voice.chats.get_audio(
- id="470a49f6-1dec-4afe-8b61-035d3b2d63b0",
+ id="your-chat-id",
)
diff --git a/src/hume/empathic_voice/configs/client.py b/src/hume/empathic_voice/configs/client.py
index b223556f..dc191e79 100644
--- a/src/hume/empathic_voice/configs/client.py
+++ b/src/hume/empathic_voice/configs/client.py
@@ -186,7 +186,7 @@ def create_config(
client.empathic_voice.configs.create_config(
name="Weather Assistant Config",
prompt=PostedConfigPromptSpec(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="",
version=0,
),
evi_version="3",
@@ -281,7 +281,7 @@ def list_config_versions(
api_key="YOUR_API_KEY",
)
response = client.empathic_voice.configs.list_config_versions(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
)
for item in response:
yield item
@@ -378,11 +378,11 @@ def create_config_version(
api_key="YOUR_API_KEY",
)
client.empathic_voice.configs.create_config_version(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
version_description="This is an updated version of the Weather Assistant Config.",
evi_version="3",
prompt=PostedConfigPromptSpec(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="",
version=0,
),
voice=VoiceName(
@@ -457,7 +457,7 @@ def delete_config(self, id: str, *, request_options: typing.Optional[RequestOpti
api_key="YOUR_API_KEY",
)
client.empathic_voice.configs.delete_config(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
)
"""
_response = self._raw_client.delete_config(id, request_options=request_options)
@@ -493,7 +493,7 @@ def update_config_name(self, id: str, *, name: str, request_options: typing.Opti
api_key="YOUR_API_KEY",
)
client.empathic_voice.configs.update_config_name(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
name="Updated Weather Assistant Config Name",
)
"""
@@ -536,7 +536,7 @@ def get_config_version(
api_key="YOUR_API_KEY",
)
client.empathic_voice.configs.get_config_version(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
version=1,
)
"""
@@ -578,7 +578,7 @@ def delete_config_version(
api_key="YOUR_API_KEY",
)
client.empathic_voice.configs.delete_config_version(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
version=1,
)
"""
@@ -629,7 +629,7 @@ def update_config_description(
api_key="YOUR_API_KEY",
)
client.empathic_voice.configs.update_config_description(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
version=1,
version_description="This is an updated version_description.",
)
@@ -817,7 +817,7 @@ async def main() -> None:
await client.empathic_voice.configs.create_config(
name="Weather Assistant Config",
prompt=PostedConfigPromptSpec(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="",
version=0,
),
evi_version="3",
@@ -920,7 +920,7 @@ async def list_config_versions(
async def main() -> None:
response = await client.empathic_voice.configs.list_config_versions(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
)
async for item in response:
yield item
@@ -1026,11 +1026,11 @@ async def create_config_version(
async def main() -> None:
await client.empathic_voice.configs.create_config_version(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
version_description="This is an updated version of the Weather Assistant Config.",
evi_version="3",
prompt=PostedConfigPromptSpec(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="",
version=0,
),
voice=VoiceName(
@@ -1113,7 +1113,7 @@ async def delete_config(self, id: str, *, request_options: typing.Optional[Reque
async def main() -> None:
await client.empathic_voice.configs.delete_config(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
)
@@ -1159,7 +1159,7 @@ async def update_config_name(
async def main() -> None:
await client.empathic_voice.configs.update_config_name(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
name="Updated Weather Assistant Config Name",
)
@@ -1210,7 +1210,7 @@ async def get_config_version(
async def main() -> None:
await client.empathic_voice.configs.get_config_version(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
version=1,
)
@@ -1260,7 +1260,7 @@ async def delete_config_version(
async def main() -> None:
await client.empathic_voice.configs.delete_config_version(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
version=1,
)
@@ -1319,7 +1319,7 @@ async def update_config_description(
async def main() -> None:
await client.empathic_voice.configs.update_config_description(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
version=1,
version_description="This is an updated version_description.",
)
diff --git a/src/hume/empathic_voice/prompts/client.py b/src/hume/empathic_voice/prompts/client.py
index 8a0311f0..55abc087 100644
--- a/src/hume/empathic_voice/prompts/client.py
+++ b/src/hume/empathic_voice/prompts/client.py
@@ -5,6 +5,7 @@
from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ...core.pagination import AsyncPager, SyncPager
from ...core.request_options import RequestOptions
+from ..types.prompt_expansion_spec import PromptExpansionSpec
from ..types.return_paged_prompts import ReturnPagedPrompts
from ..types.return_prompt import ReturnPrompt
from .raw_client import AsyncRawPromptsClient, RawPromptsClient
@@ -98,6 +99,7 @@ def create_prompt(
*,
name: str,
text: str,
+ prompt_expansion: typing.Optional[PromptExpansionSpec] = OMIT,
version_description: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> typing.Optional[ReturnPrompt]:
@@ -116,6 +118,8 @@ def create_prompt(
You can use the Prompt to define a specific goal or role for EVI, specifying how it should act or what it should focus on during the conversation. For example, EVI can be instructed to act as a customer support representative, a fitness coach, or a travel advisor, each with its own set of behaviors and response styles. For help writing a system prompt, see our [Prompting Guide](/docs/speech-to-speech-evi/guides/prompting).
+ prompt_expansion : typing.Optional[PromptExpansionSpec]
+
version_description : typing.Optional[str]
An optional description of the Prompt version.
@@ -140,7 +144,11 @@ def create_prompt(
)
"""
_response = self._raw_client.create_prompt(
- name=name, text=text, version_description=version_description, request_options=request_options
+ name=name,
+ text=text,
+ prompt_expansion=prompt_expansion,
+ version_description=version_description,
+ request_options=request_options,
)
return _response.data
@@ -191,7 +199,7 @@ def list_prompt_versions(
api_key="YOUR_API_KEY",
)
client.empathic_voice.prompts.list_prompt_versions(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
)
"""
_response = self._raw_client.list_prompt_versions(
@@ -208,6 +216,7 @@ def create_prompt_version(
id: str,
*,
text: str,
+ prompt_expansion: typing.Optional[PromptExpansionSpec] = OMIT,
version_description: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> typing.Optional[ReturnPrompt]:
@@ -226,6 +235,8 @@ def create_prompt_version(
You can use the Prompt to define a specific goal or role for EVI, specifying how it should act or what it should focus on during the conversation. For example, EVI can be instructed to act as a customer support representative, a fitness coach, or a travel advisor, each with its own set of behaviors and response styles. For help writing a system prompt, see our [Prompting Guide](/docs/speech-to-speech-evi/guides/prompting).
+ prompt_expansion : typing.Optional[PromptExpansionSpec]
+
version_description : typing.Optional[str]
An optional description of the Prompt version.
@@ -245,13 +256,17 @@ def create_prompt_version(
api_key="YOUR_API_KEY",
)
client.empathic_voice.prompts.create_prompt_version(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
text="You are an updated version of an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.",
version_description="This is an updated version of the Weather Assistant Prompt.",
)
"""
_response = self._raw_client.create_prompt_version(
- id, text=text, version_description=version_description, request_options=request_options
+ id,
+ text=text,
+ prompt_expansion=prompt_expansion,
+ version_description=version_description,
+ request_options=request_options,
)
return _response.data
@@ -281,7 +296,7 @@ def delete_prompt(self, id: str, *, request_options: typing.Optional[RequestOpti
api_key="YOUR_API_KEY",
)
client.empathic_voice.prompts.delete_prompt(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
)
"""
_response = self._raw_client.delete_prompt(id, request_options=request_options)
@@ -317,7 +332,7 @@ def update_prompt_name(self, id: str, *, name: str, request_options: typing.Opti
api_key="YOUR_API_KEY",
)
client.empathic_voice.prompts.update_prompt_name(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
name="Updated Weather Assistant Prompt Name",
)
"""
@@ -360,7 +375,7 @@ def get_prompt_version(
api_key="YOUR_API_KEY",
)
client.empathic_voice.prompts.get_prompt_version(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
version=0,
)
"""
@@ -402,7 +417,7 @@ def delete_prompt_version(
api_key="YOUR_API_KEY",
)
client.empathic_voice.prompts.delete_prompt_version(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
version=1,
)
"""
@@ -453,7 +468,7 @@ def update_prompt_description(
api_key="YOUR_API_KEY",
)
client.empathic_voice.prompts.update_prompt_description(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
version=1,
version_description="This is an updated version_description.",
)
@@ -558,6 +573,7 @@ async def create_prompt(
*,
name: str,
text: str,
+ prompt_expansion: typing.Optional[PromptExpansionSpec] = OMIT,
version_description: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> typing.Optional[ReturnPrompt]:
@@ -576,6 +592,8 @@ async def create_prompt(
You can use the Prompt to define a specific goal or role for EVI, specifying how it should act or what it should focus on during the conversation. For example, EVI can be instructed to act as a customer support representative, a fitness coach, or a travel advisor, each with its own set of behaviors and response styles. For help writing a system prompt, see our [Prompting Guide](/docs/speech-to-speech-evi/guides/prompting).
+ prompt_expansion : typing.Optional[PromptExpansionSpec]
+
version_description : typing.Optional[str]
An optional description of the Prompt version.
@@ -608,7 +626,11 @@ async def main() -> None:
asyncio.run(main())
"""
_response = await self._raw_client.create_prompt(
- name=name, text=text, version_description=version_description, request_options=request_options
+ name=name,
+ text=text,
+ prompt_expansion=prompt_expansion,
+ version_description=version_description,
+ request_options=request_options,
)
return _response.data
@@ -664,7 +686,7 @@ async def list_prompt_versions(
async def main() -> None:
await client.empathic_voice.prompts.list_prompt_versions(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
)
@@ -684,6 +706,7 @@ async def create_prompt_version(
id: str,
*,
text: str,
+ prompt_expansion: typing.Optional[PromptExpansionSpec] = OMIT,
version_description: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> typing.Optional[ReturnPrompt]:
@@ -702,6 +725,8 @@ async def create_prompt_version(
You can use the Prompt to define a specific goal or role for EVI, specifying how it should act or what it should focus on during the conversation. For example, EVI can be instructed to act as a customer support representative, a fitness coach, or a travel advisor, each with its own set of behaviors and response styles. For help writing a system prompt, see our [Prompting Guide](/docs/speech-to-speech-evi/guides/prompting).
+ prompt_expansion : typing.Optional[PromptExpansionSpec]
+
version_description : typing.Optional[str]
An optional description of the Prompt version.
@@ -726,7 +751,7 @@ async def create_prompt_version(
async def main() -> None:
await client.empathic_voice.prompts.create_prompt_version(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
text="You are an updated version of an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.",
version_description="This is an updated version of the Weather Assistant Prompt.",
)
@@ -735,7 +760,11 @@ async def main() -> None:
asyncio.run(main())
"""
_response = await self._raw_client.create_prompt_version(
- id, text=text, version_description=version_description, request_options=request_options
+ id,
+ text=text,
+ prompt_expansion=prompt_expansion,
+ version_description=version_description,
+ request_options=request_options,
)
return _response.data
@@ -770,7 +799,7 @@ async def delete_prompt(self, id: str, *, request_options: typing.Optional[Reque
async def main() -> None:
await client.empathic_voice.prompts.delete_prompt(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
)
@@ -816,7 +845,7 @@ async def update_prompt_name(
async def main() -> None:
await client.empathic_voice.prompts.update_prompt_name(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
name="Updated Weather Assistant Prompt Name",
)
@@ -867,7 +896,7 @@ async def get_prompt_version(
async def main() -> None:
await client.empathic_voice.prompts.get_prompt_version(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
version=0,
)
@@ -917,7 +946,7 @@ async def delete_prompt_version(
async def main() -> None:
await client.empathic_voice.prompts.delete_prompt_version(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
version=1,
)
@@ -976,7 +1005,7 @@ async def update_prompt_description(
async def main() -> None:
await client.empathic_voice.prompts.update_prompt_description(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
version=1,
version_description="This is an updated version_description.",
)
diff --git a/src/hume/empathic_voice/prompts/raw_client.py b/src/hume/empathic_voice/prompts/raw_client.py
index 62a93a45..8facc076 100644
--- a/src/hume/empathic_voice/prompts/raw_client.py
+++ b/src/hume/empathic_voice/prompts/raw_client.py
@@ -10,8 +10,10 @@
from ...core.pagination import AsyncPager, SyncPager
from ...core.pydantic_utilities import parse_obj_as
from ...core.request_options import RequestOptions
+from ...core.serialization import convert_and_respect_annotation_metadata
from ..errors.bad_request_error import BadRequestError
from ..types.error_response import ErrorResponse
+from ..types.prompt_expansion_spec import PromptExpansionSpec
from ..types.return_paged_prompts import ReturnPagedPrompts
from ..types.return_prompt import ReturnPrompt
@@ -117,6 +119,7 @@ def create_prompt(
*,
name: str,
text: str,
+ prompt_expansion: typing.Optional[PromptExpansionSpec] = OMIT,
version_description: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> HttpResponse[typing.Optional[ReturnPrompt]]:
@@ -135,6 +138,8 @@ def create_prompt(
You can use the Prompt to define a specific goal or role for EVI, specifying how it should act or what it should focus on during the conversation. For example, EVI can be instructed to act as a customer support representative, a fitness coach, or a travel advisor, each with its own set of behaviors and response styles. For help writing a system prompt, see our [Prompting Guide](/docs/speech-to-speech-evi/guides/prompting).
+ prompt_expansion : typing.Optional[PromptExpansionSpec]
+
version_description : typing.Optional[str]
An optional description of the Prompt version.
@@ -152,6 +157,9 @@ def create_prompt(
method="POST",
json={
"name": name,
+ "prompt_expansion": convert_and_respect_annotation_metadata(
+ object_=prompt_expansion, annotation=PromptExpansionSpec, direction="write"
+ ),
"text": text,
"version_description": version_description,
},
@@ -270,6 +278,7 @@ def create_prompt_version(
id: str,
*,
text: str,
+ prompt_expansion: typing.Optional[PromptExpansionSpec] = OMIT,
version_description: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> HttpResponse[typing.Optional[ReturnPrompt]]:
@@ -288,6 +297,8 @@ def create_prompt_version(
You can use the Prompt to define a specific goal or role for EVI, specifying how it should act or what it should focus on during the conversation. For example, EVI can be instructed to act as a customer support representative, a fitness coach, or a travel advisor, each with its own set of behaviors and response styles. For help writing a system prompt, see our [Prompting Guide](/docs/speech-to-speech-evi/guides/prompting).
+ prompt_expansion : typing.Optional[PromptExpansionSpec]
+
version_description : typing.Optional[str]
An optional description of the Prompt version.
@@ -304,6 +315,9 @@ def create_prompt_version(
base_url=self._client_wrapper.get_environment().base,
method="POST",
json={
+ "prompt_expansion": convert_and_respect_annotation_metadata(
+ object_=prompt_expansion, annotation=PromptExpansionSpec, direction="write"
+ ),
"text": text,
"version_description": version_description,
},
@@ -733,6 +747,7 @@ async def create_prompt(
*,
name: str,
text: str,
+ prompt_expansion: typing.Optional[PromptExpansionSpec] = OMIT,
version_description: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[typing.Optional[ReturnPrompt]]:
@@ -751,6 +766,8 @@ async def create_prompt(
You can use the Prompt to define a specific goal or role for EVI, specifying how it should act or what it should focus on during the conversation. For example, EVI can be instructed to act as a customer support representative, a fitness coach, or a travel advisor, each with its own set of behaviors and response styles. For help writing a system prompt, see our [Prompting Guide](/docs/speech-to-speech-evi/guides/prompting).
+ prompt_expansion : typing.Optional[PromptExpansionSpec]
+
version_description : typing.Optional[str]
An optional description of the Prompt version.
@@ -768,6 +785,9 @@ async def create_prompt(
method="POST",
json={
"name": name,
+ "prompt_expansion": convert_and_respect_annotation_metadata(
+ object_=prompt_expansion, annotation=PromptExpansionSpec, direction="write"
+ ),
"text": text,
"version_description": version_description,
},
@@ -886,6 +906,7 @@ async def create_prompt_version(
id: str,
*,
text: str,
+ prompt_expansion: typing.Optional[PromptExpansionSpec] = OMIT,
version_description: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[typing.Optional[ReturnPrompt]]:
@@ -904,6 +925,8 @@ async def create_prompt_version(
You can use the Prompt to define a specific goal or role for EVI, specifying how it should act or what it should focus on during the conversation. For example, EVI can be instructed to act as a customer support representative, a fitness coach, or a travel advisor, each with its own set of behaviors and response styles. For help writing a system prompt, see our [Prompting Guide](/docs/speech-to-speech-evi/guides/prompting).
+ prompt_expansion : typing.Optional[PromptExpansionSpec]
+
version_description : typing.Optional[str]
An optional description of the Prompt version.
@@ -920,6 +943,9 @@ async def create_prompt_version(
base_url=self._client_wrapper.get_environment().base,
method="POST",
json={
+ "prompt_expansion": convert_and_respect_annotation_metadata(
+ object_=prompt_expansion, annotation=PromptExpansionSpec, direction="write"
+ ),
"text": text,
"version_description": version_description,
},
diff --git a/src/hume/empathic_voice/tools/client.py b/src/hume/empathic_voice/tools/client.py
index 2d520228..cc436b8b 100644
--- a/src/hume/empathic_voice/tools/client.py
+++ b/src/hume/empathic_voice/tools/client.py
@@ -212,7 +212,7 @@ def list_tool_versions(
api_key="YOUR_API_KEY",
)
response = client.empathic_voice.tools.list_tool_versions(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
)
for item in response:
yield item
@@ -278,7 +278,7 @@ def create_tool_version(
api_key="YOUR_API_KEY",
)
client.empathic_voice.tools.create_tool_version(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
parameters='{ "type": "object", "properties": { "location": { "type": "string", "description": "The city and state, e.g. San Francisco, CA" }, "format": { "type": "string", "enum": ["celsius", "fahrenheit", "kelvin"], "description": "The temperature unit to use. Infer this from the users location." } }, "required": ["location", "format"] }',
version_description="Fetches current weather and uses celsius, fahrenheit, or kelvin based on location of user.",
fallback_content="Unable to fetch current weather.",
@@ -321,7 +321,7 @@ def delete_tool(self, id: str, *, request_options: typing.Optional[RequestOption
api_key="YOUR_API_KEY",
)
client.empathic_voice.tools.delete_tool(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
)
"""
_response = self._raw_client.delete_tool(id, request_options=request_options)
@@ -362,7 +362,7 @@ def update_tool_name(self, id: str, *, name: str, request_options: typing.Option
api_key="YOUR_API_KEY",
)
client.empathic_voice.tools.update_tool_name(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
name="get_current_temperature",
)
"""
@@ -405,7 +405,7 @@ def get_tool_version(
api_key="YOUR_API_KEY",
)
client.empathic_voice.tools.get_tool_version(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
version=1,
)
"""
@@ -447,7 +447,7 @@ def delete_tool_version(
api_key="YOUR_API_KEY",
)
client.empathic_voice.tools.delete_tool_version(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+                id="your-tool-id",
version=1,
)
"""
@@ -498,7 +498,7 @@ def update_tool_description(
api_key="YOUR_API_KEY",
)
client.empathic_voice.tools.update_tool_description(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
version=1,
version_description="Fetches current temperature, precipitation, wind speed, AQI, and other weather conditions. Uses Celsius, Fahrenheit, or kelvin depending on user's region.",
)
@@ -730,7 +730,7 @@ async def list_tool_versions(
async def main() -> None:
response = await client.empathic_voice.tools.list_tool_versions(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
)
async for item in response:
yield item
@@ -805,7 +805,7 @@ async def create_tool_version(
async def main() -> None:
await client.empathic_voice.tools.create_tool_version(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
parameters='{ "type": "object", "properties": { "location": { "type": "string", "description": "The city and state, e.g. San Francisco, CA" }, "format": { "type": "string", "enum": ["celsius", "fahrenheit", "kelvin"], "description": "The temperature unit to use. Infer this from the users location." } }, "required": ["location", "format"] }',
version_description="Fetches current weather and uses celsius, fahrenheit, or kelvin based on location of user.",
fallback_content="Unable to fetch current weather.",
@@ -856,7 +856,7 @@ async def delete_tool(self, id: str, *, request_options: typing.Optional[Request
async def main() -> None:
await client.empathic_voice.tools.delete_tool(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
)
@@ -907,7 +907,7 @@ async def update_tool_name(
async def main() -> None:
await client.empathic_voice.tools.update_tool_name(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
name="get_current_temperature",
)
@@ -958,7 +958,7 @@ async def get_tool_version(
async def main() -> None:
await client.empathic_voice.tools.get_tool_version(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
version=1,
)
@@ -1008,7 +1008,7 @@ async def delete_tool_version(
async def main() -> None:
await client.empathic_voice.tools.delete_tool_version(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+                id="your-tool-id",
version=1,
)
@@ -1067,7 +1067,7 @@ async def update_tool_description(
async def main() -> None:
await client.empathic_voice.tools.update_tool_description(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
version=1,
version_description="Fetches current temperature, precipitation, wind speed, AQI, and other weather conditions. Uses Celsius, Fahrenheit, or kelvin depending on user's region.",
)
diff --git a/src/hume/empathic_voice/types/__init__.py b/src/hume/empathic_voice/types/__init__.py
index 531427e4..525188a3 100644
--- a/src/hume/empathic_voice/types/__init__.py
+++ b/src/hume/empathic_voice/types/__init__.py
@@ -51,6 +51,7 @@
from .posted_user_defined_tool_spec import PostedUserDefinedToolSpec
from .posted_webhook_event_type import PostedWebhookEventType
from .posted_webhook_spec import PostedWebhookSpec
+ from .prompt_expansion_spec import PromptExpansionSpec
from .prosody_inference import ProsodyInference
from .resume_assistant_message import ResumeAssistantMessage
from .return_builtin_tool import ReturnBuiltinTool
@@ -170,6 +171,7 @@
"PostedUserDefinedToolSpec": ".posted_user_defined_tool_spec",
"PostedWebhookEventType": ".posted_webhook_event_type",
"PostedWebhookSpec": ".posted_webhook_spec",
+ "PromptExpansionSpec": ".prompt_expansion_spec",
"ProsodyInference": ".prosody_inference",
"ResumeAssistantMessage": ".resume_assistant_message",
"ReturnBuiltinTool": ".return_builtin_tool",
@@ -311,6 +313,7 @@ def __dir__():
"PostedUserDefinedToolSpec",
"PostedWebhookEventType",
"PostedWebhookSpec",
+ "PromptExpansionSpec",
"ProsodyInference",
"ResumeAssistantMessage",
"ReturnBuiltinTool",
diff --git a/src/hume/empathic_voice/types/language_model_type.py b/src/hume/empathic_voice/types/language_model_type.py
index 27cc612e..8e8ce5e3 100644
--- a/src/hume/empathic_voice/types/language_model_type.py
+++ b/src/hume/empathic_voice/types/language_model_type.py
@@ -28,6 +28,7 @@
"gemini-2.0-flash",
"gemini-2.5-flash",
"gemini-2.5-flash-preview-04-17",
+ "gemini-3-flash-preview",
"gpt-4-turbo",
"gpt-4-turbo-preview",
"gpt-3.5-turbo-0125",
@@ -44,6 +45,10 @@
"gpt-5-priority",
"gpt-5-mini-priority",
"gpt-5-nano-priority",
+ "gpt-5.1",
+ "gpt-5.1-priority",
+ "gpt-5.2",
+ "gpt-5.2-priority",
"gemma-7b-it",
"llama3-8b-8192",
"llama3-70b-8192",
diff --git a/src/hume/empathic_voice/types/model_provider_enum.py b/src/hume/empathic_voice/types/model_provider_enum.py
index fb46e3ab..beb2c7e1 100644
--- a/src/hume/empathic_voice/types/model_provider_enum.py
+++ b/src/hume/empathic_voice/types/model_provider_enum.py
@@ -15,6 +15,7 @@
"PERPLEXITY",
"SAMBANOVA",
"CEREBRAS",
+ "X_AI",
],
typing.Any,
]
diff --git a/src/hume/empathic_voice/types/posted_config_prompt_spec.py b/src/hume/empathic_voice/types/posted_config_prompt_spec.py
index 81eb66f9..00d28b60 100644
--- a/src/hume/empathic_voice/types/posted_config_prompt_spec.py
+++ b/src/hume/empathic_voice/types/posted_config_prompt_spec.py
@@ -4,6 +4,7 @@
import pydantic
from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .prompt_expansion_spec import PromptExpansionSpec
class PostedConfigPromptSpec(UniversalBaseModel):
@@ -16,6 +17,7 @@ class PostedConfigPromptSpec(UniversalBaseModel):
Identifier for a Prompt. Formatted as a UUID.
"""
+ prompt_expansion: typing.Optional[PromptExpansionSpec] = None
text: typing.Optional[str] = pydantic.Field(default=None)
"""
Text used to create a new prompt for a particular config.
diff --git a/src/hume/empathic_voice/types/posted_ellm_model.py b/src/hume/empathic_voice/types/posted_ellm_model.py
index b7f12122..b3f64592 100644
--- a/src/hume/empathic_voice/types/posted_ellm_model.py
+++ b/src/hume/empathic_voice/types/posted_ellm_model.py
@@ -13,9 +13,7 @@ class PostedEllmModel(UniversalBaseModel):
allow_short_responses: typing.Optional[bool] = pydantic.Field(default=None)
"""
- Boolean indicating if the eLLM is allowed to generate short responses.
-
- If omitted, short responses from the eLLM are enabled by default.
+ Boolean indicating if the eLLM is allowed to generate short responses (new EVI 3 configs default to disabled; new versions inherit prior value when omitted).
"""
if IS_PYDANTIC_V2:
diff --git a/src/hume/empathic_voice/types/prompt_expansion_spec.py b/src/hume/empathic_voice/types/prompt_expansion_spec.py
new file mode 100644
index 00000000..f5e8c6bb
--- /dev/null
+++ b/src/hume/empathic_voice/types/prompt_expansion_spec.py
@@ -0,0 +1,28 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+
+
+class PromptExpansionSpec(UniversalBaseModel):
+ """
+ Configuration for prompt expansion behavior.
+ """
+
+ enabled: typing.Optional[bool] = pydantic.Field(default=None)
+ """
+ Boolean indicating whether prompt expansion is enabled.
+
+ Defaults to `true`. When set to `false`, no additional instructions are appended to the system prompt, giving full control over the prompt content. Only applicable when using an external supplemental language model.
+ """
+
+ if IS_PYDANTIC_V2:
+ model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
+ else:
+
+ class Config:
+ frozen = True
+ smart_union = True
+ extra = pydantic.Extra.allow
diff --git a/src/hume/empathic_voice/types/return_chat_event_type.py b/src/hume/empathic_voice/types/return_chat_event_type.py
index ae7d745e..97e85c3d 100644
--- a/src/hume/empathic_voice/types/return_chat_event_type.py
+++ b/src/hume/empathic_voice/types/return_chat_event_type.py
@@ -4,17 +4,19 @@
ReturnChatEventType = typing.Union[
typing.Literal[
+ "AGENT_MESSAGE",
+ "ASSISTANT_PROSODY",
+ "CHAT_START_MESSAGE",
+ "CHAT_END_MESSAGE",
"FUNCTION_CALL",
"FUNCTION_CALL_RESPONSE",
- "CHAT_END_MESSAGE",
- "AGENT_MESSAGE",
- "SYSTEM_PROMPT",
- "USER_RECORDING_START_MESSAGE",
+ "PAUSE_ONSET",
"RESUME_ONSET",
+ "SESSION_SETTINGS",
+ "SYSTEM_PROMPT",
"USER_INTERRUPTION",
- "CHAT_START_MESSAGE",
- "PAUSE_ONSET",
"USER_MESSAGE",
+ "USER_RECORDING_START_MESSAGE",
],
typing.Any,
]
diff --git a/src/hume/empathic_voice/types/return_ellm_model.py b/src/hume/empathic_voice/types/return_ellm_model.py
index 2a2a3239..f8f9ee92 100644
--- a/src/hume/empathic_voice/types/return_ellm_model.py
+++ b/src/hume/empathic_voice/types/return_ellm_model.py
@@ -13,9 +13,7 @@ class ReturnEllmModel(UniversalBaseModel):
allow_short_responses: bool = pydantic.Field()
"""
- Boolean indicating if the eLLM is allowed to generate short responses.
-
- If omitted, short responses from the eLLM are enabled by default.
+ Boolean indicating if the eLLM is allowed to generate short responses (new EVI 3 configs default to disabled; new versions inherit prior value when omitted).
"""
if IS_PYDANTIC_V2:
diff --git a/src/hume/empathic_voice/types/return_prompt.py b/src/hume/empathic_voice/types/return_prompt.py
index 3640d069..148e7968 100644
--- a/src/hume/empathic_voice/types/return_prompt.py
+++ b/src/hume/empathic_voice/types/return_prompt.py
@@ -4,6 +4,7 @@
import pydantic
from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .prompt_expansion_spec import PromptExpansionSpec
class ReturnPrompt(UniversalBaseModel):
@@ -31,6 +32,7 @@ class ReturnPrompt(UniversalBaseModel):
Name applied to all versions of a particular Prompt.
"""
+ prompt_expansion: typing.Optional[PromptExpansionSpec] = None
text: str = pydantic.Field()
"""
Instructions used to shape EVI's behavior, responses, and style.
diff --git a/src/hume/tts/voices/client.py b/src/hume/tts/voices/client.py
index 056bbef3..ad6f8315 100644
--- a/src/hume/tts/voices/client.py
+++ b/src/hume/tts/voices/client.py
@@ -129,7 +129,7 @@ def create(
api_key="YOUR_API_KEY",
)
client.tts.voices.create(
- generation_id="795c949a-1510-4a80-9646-7d0863b023ab",
+            generation_id="your-generation-id",
name="David Hume",
)
"""
@@ -296,7 +296,7 @@ async def create(
async def main() -> None:
await client.tts.voices.create(
- generation_id="795c949a-1510-4a80-9646-7d0863b023ab",
+            generation_id="your-generation-id",
name="David Hume",
)
diff --git a/tests/conftest.py b/tests/conftest.py
index dd48d29f..0b38341b 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -16,35 +16,18 @@
_STARTED: bool = False
_WIREMOCK_PORT: str = "8080" # Default, will be updated after container starts
+_PROJECT_NAME: str = "hume-api"
-
-def _compose_file() -> str:
- """Returns the path to the docker-compose file for WireMock."""
- # This file lives in tests/conftest.py, so the project root is the parent of tests.
- tests_dir = os.path.dirname(__file__)
- project_root = os.path.abspath(os.path.join(tests_dir, ".."))
- wiremock_dir = os.path.join(project_root, "wiremock")
- return os.path.join(wiremock_dir, "docker-compose.test.yml")
-
-
-def _project_name() -> str:
- """Returns a unique project name for this test fixture to avoid container name conflicts."""
- tests_dir = os.path.dirname(__file__)
- project_root = os.path.abspath(os.path.join(tests_dir, ".."))
- # Use the last two directory names to create a unique project name
- # e.g., "python-streaming-parameter-openapi-with-wire-tests"
- parent = os.path.basename(os.path.dirname(project_root))
- current = os.path.basename(project_root)
- return f"{parent}-{current}".replace("_", "-").lower()
+# This file lives at tests/conftest.py, so the project root is one level up.
+_PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+_COMPOSE_FILE = os.path.join(_PROJECT_ROOT, "wiremock", "docker-compose.test.yml")
def _get_wiremock_port() -> str:
"""Gets the dynamically assigned port for the WireMock container."""
- compose_file = _compose_file()
- project = _project_name()
try:
result = subprocess.run(
- ["docker", "compose", "-f", compose_file, "-p", project, "port", "wiremock", "8080"],
+ ["docker", "compose", "-f", _COMPOSE_FILE, "-p", _PROJECT_NAME, "port", "wiremock", "8080"],
check=True,
capture_output=True,
text=True,
@@ -62,12 +45,10 @@ def _start_wiremock() -> None:
if _STARTED:
return
- compose_file = _compose_file()
- project = _project_name()
- print(f"\nStarting WireMock container (project: {project})...")
+ print(f"\nStarting WireMock container (project: {_PROJECT_NAME})...")
try:
subprocess.run(
- ["docker", "compose", "-f", compose_file, "-p", project, "up", "-d", "--wait"],
+ ["docker", "compose", "-f", _COMPOSE_FILE, "-p", _PROJECT_NAME, "up", "-d", "--wait"],
check=True,
capture_output=True,
text=True,
@@ -83,11 +64,9 @@ def _start_wiremock() -> None:
def _stop_wiremock() -> None:
"""Stops and removes the WireMock container."""
- compose_file = _compose_file()
- project = _project_name()
print("\nStopping WireMock container...")
subprocess.run(
- ["docker", "compose", "-f", compose_file, "-p", project, "down", "-v"],
+ ["docker", "compose", "-f", _COMPOSE_FILE, "-p", _PROJECT_NAME, "down", "-v"],
check=False,
capture_output=True,
)
diff --git a/tests/wire/conftest.py b/tests/wire/conftest.py
index dd2eedb3..f2879ac8 100644
--- a/tests/wire/conftest.py
+++ b/tests/wire/conftest.py
@@ -5,19 +5,25 @@
WireMock and for verifying requests in WireMock.
The WireMock container lifecycle itself is managed by a top-level pytest
-plugin (wiremock_pytest_plugin.py) so that the container is started exactly
-once per test run, even when using pytest-xdist.
+plugin (tests/conftest.py) so that the container is started exactly once
+per test run, even when using pytest-xdist.
"""
import inspect
import os
from typing import Any, Dict, Optional
-import requests
+import httpx
from hume.client import HumeClient
from hume.environment import HumeClientEnvironment
+# Check once at import time whether the client constructor accepts a headers kwarg.
+try:
+ _CLIENT_SUPPORTS_HEADERS: bool = "headers" in inspect.signature(HumeClient).parameters
+except (TypeError, ValueError):
+ _CLIENT_SUPPORTS_HEADERS = False
+
def _get_wiremock_base_url() -> str:
"""Returns the WireMock base URL using the dynamically assigned port."""
@@ -38,18 +44,12 @@ def get_client(test_id: str) -> HumeClient:
test_headers = {"X-Test-Id": test_id}
base_url = _get_wiremock_base_url()
- # Prefer passing headers directly if the client constructor supports it.
- try:
- if "headers" in inspect.signature(HumeClient).parameters:
- return HumeClient(
- environment=HumeClientEnvironment(base=base_url, evi=base_url, tts=base_url, stream=base_url),
- headers=test_headers,
- api_key="test_api_key",
- )
- except (TypeError, ValueError):
- pass
-
- import httpx
+ if _CLIENT_SUPPORTS_HEADERS:
+ return HumeClient(
+ environment=HumeClientEnvironment(base=base_url, evi=base_url, tts=base_url, stream=base_url),
+ headers=test_headers,
+ api_key="test_api_key",
+ )
return HumeClient(
environment=HumeClientEnvironment(base=base_url, evi=base_url, tts=base_url, stream=base_url),
@@ -65,7 +65,7 @@ def verify_request_count(
query_params: Optional[Dict[str, str]],
expected: int,
) -> None:
- """Verifies the number of requests made to WireMock filtered by test ID for concurrency safety"""
+ """Verifies the number of requests made to WireMock filtered by test ID for concurrency safety."""
wiremock_admin_url = f"{_get_wiremock_base_url()}/__admin"
request_body: Dict[str, Any] = {
"method": method,
@@ -75,7 +75,7 @@ def verify_request_count(
if query_params:
query_parameters = {k: {"equalTo": v} for k, v in query_params.items()}
request_body["queryParameters"] = query_parameters
- response = requests.post(f"{wiremock_admin_url}/requests/find", json=request_body)
+ response = httpx.post(f"{wiremock_admin_url}/requests/find", json=request_body)
assert response.status_code == 200, "Failed to query WireMock requests"
result = response.json()
requests_found = len(result.get("requests", []))
diff --git a/tests/wire/test_empathicVoice_chatGroups.py b/tests/wire/test_empathicVoice_chatGroups.py
index 74162113..68459a37 100644
--- a/tests/wire/test_empathicVoice_chatGroups.py
+++ b/tests/wire/test_empathicVoice_chatGroups.py
@@ -6,18 +6,13 @@ def test_empathicVoice_chatGroups_list_chat_groups() -> None:
test_id = "empathic_voice.chat_groups.list_chat_groups.0"
client = get_client(test_id)
client.empathic_voice.chat_groups.list_chat_groups(
- page_number=0, page_size=1, ascending_order=True, config_id="1b60e1a0-cc59-424a-8d2c-189d354db3f3"
+ page_number=0, page_size=1, ascending_order=True, config_id="your-config-id"
)
verify_request_count(
test_id,
"GET",
"/v0/evi/chat_groups",
- {
- "page_number": "0",
- "page_size": "1",
- "ascending_order": "true",
- "config_id": "1b60e1a0-cc59-424a-8d2c-189d354db3f3",
- },
+ {"page_number": "0", "page_size": "1", "ascending_order": "true", "config_id": "your-config-id"},
1,
)
@@ -27,12 +22,12 @@ def test_empathicVoice_chatGroups_get_chat_group() -> None:
test_id = "empathic_voice.chat_groups.get_chat_group.0"
client = get_client(test_id)
client.empathic_voice.chat_groups.get_chat_group(
- id="697056f0-6c7e-487d-9bd8-9c19df79f05f", page_number=0, page_size=1, ascending_order=True
+ id="your-chat-group-id", page_number=0, page_size=1, ascending_order=True
)
verify_request_count(
test_id,
"GET",
- "/v0/evi/chat_groups/697056f0-6c7e-487d-9bd8-9c19df79f05f",
+ "/v0/evi/chat_groups/your-chat-group-id",
{"page_number": "0", "page_size": "1", "ascending_order": "true"},
1,
)
@@ -43,12 +38,12 @@ def test_empathicVoice_chatGroups_get_audio() -> None:
test_id = "empathic_voice.chat_groups.get_audio.0"
client = get_client(test_id)
client.empathic_voice.chat_groups.get_audio(
- id="369846cf-6ad5-404d-905e-a8acb5cdfc78", page_number=0, page_size=10, ascending_order=True
+ id="your-chat-group-id", page_number=0, page_size=10, ascending_order=True
)
verify_request_count(
test_id,
"GET",
- "/v0/evi/chat_groups/369846cf-6ad5-404d-905e-a8acb5cdfc78/audio",
+ "/v0/evi/chat_groups/your-chat-group-id/audio",
{"page_number": "0", "page_size": "10", "ascending_order": "true"},
1,
)
@@ -59,12 +54,12 @@ def test_empathicVoice_chatGroups_list_chat_group_events() -> None:
test_id = "empathic_voice.chat_groups.list_chat_group_events.0"
client = get_client(test_id)
client.empathic_voice.chat_groups.list_chat_group_events(
- id="697056f0-6c7e-487d-9bd8-9c19df79f05f", page_number=0, page_size=3, ascending_order=True
+ id="your-chat-group-id", page_number=0, page_size=3, ascending_order=True
)
verify_request_count(
test_id,
"GET",
- "/v0/evi/chat_groups/697056f0-6c7e-487d-9bd8-9c19df79f05f/events",
+ "/v0/evi/chat_groups/your-chat-group-id/events",
{"page_number": "0", "page_size": "3", "ascending_order": "true"},
1,
)
diff --git a/tests/wire/test_empathicVoice_chats.py b/tests/wire/test_empathicVoice_chats.py
index 47e57ec4..36e3fa66 100644
--- a/tests/wire/test_empathicVoice_chats.py
+++ b/tests/wire/test_empathicVoice_chats.py
@@ -15,13 +15,11 @@ def test_empathicVoice_chats_list_chat_events() -> None:
"""Test list-chat-events endpoint with WireMock"""
test_id = "empathic_voice.chats.list_chat_events.0"
client = get_client(test_id)
- client.empathic_voice.chats.list_chat_events(
- id="470a49f6-1dec-4afe-8b61-035d3b2d63b0", page_number=0, page_size=3, ascending_order=True
- )
+ client.empathic_voice.chats.list_chat_events(id="your-chat-id", page_number=0, page_size=3, ascending_order=True)
verify_request_count(
test_id,
"GET",
- "/v0/evi/chats/470a49f6-1dec-4afe-8b61-035d3b2d63b0",
+ "/v0/evi/chats/your-chat-id",
{"page_number": "0", "page_size": "3", "ascending_order": "true"},
1,
)
@@ -31,5 +29,5 @@ def test_empathicVoice_chats_get_audio() -> None:
"""Test get-audio endpoint with WireMock"""
test_id = "empathic_voice.chats.get_audio.0"
client = get_client(test_id)
- client.empathic_voice.chats.get_audio(id="470a49f6-1dec-4afe-8b61-035d3b2d63b0")
- verify_request_count(test_id, "GET", "/v0/evi/chats/470a49f6-1dec-4afe-8b61-035d3b2d63b0/audio", None, 1)
+ client.empathic_voice.chats.get_audio(id="your-chat-id")
+ verify_request_count(test_id, "GET", "/v0/evi/chats/your-chat-id/audio", None, 1)
diff --git a/tests/wire/test_empathicVoice_configs.py b/tests/wire/test_empathicVoice_configs.py
index 7a0c16be..3608dca6 100644
--- a/tests/wire/test_empathicVoice_configs.py
+++ b/tests/wire/test_empathicVoice_configs.py
@@ -15,7 +15,7 @@ def test_empathicVoice_configs_create_config() -> None:
client = get_client(test_id)
client.empathic_voice.configs.create_config(
name="Weather Assistant Config",
- prompt={"id": "af699d45-2985-42cc-91b9-af9e5da3bac5", "version": 0},
+ prompt={"id": "", "version": 0},
evi_version="3",
voice={"provider": "HUME_AI"},
language_model={"model_provider": "ANTHROPIC", "model_resource": "claude-3-7-sonnet-latest", "temperature": 1},
@@ -32,8 +32,8 @@ def test_empathicVoice_configs_list_config_versions() -> None:
"""Test list-config-versions endpoint with WireMock"""
test_id = "empathic_voice.configs.list_config_versions.0"
client = get_client(test_id)
- client.empathic_voice.configs.list_config_versions(id="1b60e1a0-cc59-424a-8d2c-189d354db3f3")
- verify_request_count(test_id, "GET", "/v0/evi/configs/1b60e1a0-cc59-424a-8d2c-189d354db3f3", None, 1)
+ client.empathic_voice.configs.list_config_versions(id="your-config-id")
+ verify_request_count(test_id, "GET", "/v0/evi/configs/your-config-id", None, 1)
def test_empathicVoice_configs_create_config_version() -> None:
@@ -41,10 +41,10 @@ def test_empathicVoice_configs_create_config_version() -> None:
test_id = "empathic_voice.configs.create_config_version.0"
client = get_client(test_id)
client.empathic_voice.configs.create_config_version(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
+ id="your-config-id",
version_description="This is an updated version of the Weather Assistant Config.",
evi_version="3",
- prompt={"id": "af699d45-2985-42cc-91b9-af9e5da3bac5", "version": 0},
+ prompt={"id": "", "version": 0},
voice={"provider": "HUME_AI"},
language_model={"model_provider": "ANTHROPIC", "model_resource": "claude-3-7-sonnet-latest", "temperature": 1},
ellm_model={"allow_short_responses": True},
@@ -54,41 +54,39 @@ def test_empathicVoice_configs_create_config_version() -> None:
"on_max_duration_timeout": {"enabled": False, "text": ""},
},
)
- verify_request_count(test_id, "POST", "/v0/evi/configs/1b60e1a0-cc59-424a-8d2c-189d354db3f3", None, 1)
+ verify_request_count(test_id, "POST", "/v0/evi/configs/your-config-id", None, 1)
def test_empathicVoice_configs_delete_config() -> None:
"""Test delete-config endpoint with WireMock"""
test_id = "empathic_voice.configs.delete_config.0"
client = get_client(test_id)
- client.empathic_voice.configs.delete_config(id="1b60e1a0-cc59-424a-8d2c-189d354db3f3")
- verify_request_count(test_id, "DELETE", "/v0/evi/configs/1b60e1a0-cc59-424a-8d2c-189d354db3f3", None, 1)
+ client.empathic_voice.configs.delete_config(id="your-config-id")
+ verify_request_count(test_id, "DELETE", "/v0/evi/configs/your-config-id", None, 1)
def test_empathicVoice_configs_update_config_name() -> None:
"""Test update-config-name endpoint with WireMock"""
test_id = "empathic_voice.configs.update_config_name.0"
client = get_client(test_id)
- client.empathic_voice.configs.update_config_name(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3", name="Updated Weather Assistant Config Name"
- )
- verify_request_count(test_id, "PATCH", "/v0/evi/configs/1b60e1a0-cc59-424a-8d2c-189d354db3f3", None, 1)
+ client.empathic_voice.configs.update_config_name(id="your-config-id", name="Updated Weather Assistant Config Name")
+ verify_request_count(test_id, "PATCH", "/v0/evi/configs/your-config-id", None, 1)
def test_empathicVoice_configs_get_config_version() -> None:
"""Test get-config-version endpoint with WireMock"""
test_id = "empathic_voice.configs.get_config_version.0"
client = get_client(test_id)
- client.empathic_voice.configs.get_config_version(id="1b60e1a0-cc59-424a-8d2c-189d354db3f3", version=1)
- verify_request_count(test_id, "GET", "/v0/evi/configs/1b60e1a0-cc59-424a-8d2c-189d354db3f3/version/1", None, 1)
+ client.empathic_voice.configs.get_config_version(id="your-config-id", version=1)
+ verify_request_count(test_id, "GET", "/v0/evi/configs/your-config-id/version/1", None, 1)
def test_empathicVoice_configs_delete_config_version() -> None:
"""Test delete-config-version endpoint with WireMock"""
test_id = "empathic_voice.configs.delete_config_version.0"
client = get_client(test_id)
- client.empathic_voice.configs.delete_config_version(id="1b60e1a0-cc59-424a-8d2c-189d354db3f3", version=1)
- verify_request_count(test_id, "DELETE", "/v0/evi/configs/1b60e1a0-cc59-424a-8d2c-189d354db3f3/version/1", None, 1)
+ client.empathic_voice.configs.delete_config_version(id="your-config-id", version=1)
+ verify_request_count(test_id, "DELETE", "/v0/evi/configs/your-config-id/version/1", None, 1)
def test_empathicVoice_configs_update_config_description() -> None:
@@ -96,8 +94,6 @@ def test_empathicVoice_configs_update_config_description() -> None:
test_id = "empathic_voice.configs.update_config_description.0"
client = get_client(test_id)
client.empathic_voice.configs.update_config_description(
- id="1b60e1a0-cc59-424a-8d2c-189d354db3f3",
- version=1,
- version_description="This is an updated version_description.",
+ id="your-config-id", version=1, version_description="This is an updated version_description."
)
- verify_request_count(test_id, "PATCH", "/v0/evi/configs/1b60e1a0-cc59-424a-8d2c-189d354db3f3/version/1", None, 1)
+ verify_request_count(test_id, "PATCH", "/v0/evi/configs/your-config-id/version/1", None, 1)
diff --git a/tests/wire/test_empathicVoice_prompts.py b/tests/wire/test_empathicVoice_prompts.py
index 3dc25e7f..4ac57a84 100644
--- a/tests/wire/test_empathicVoice_prompts.py
+++ b/tests/wire/test_empathicVoice_prompts.py
@@ -24,8 +24,8 @@ def test_empathicVoice_prompts_list_prompt_versions() -> None:
"""Test list-prompt-versions endpoint with WireMock"""
test_id = "empathic_voice.prompts.list_prompt_versions.0"
client = get_client(test_id)
- client.empathic_voice.prompts.list_prompt_versions(id="af699d45-2985-42cc-91b9-af9e5da3bac5")
- verify_request_count(test_id, "GET", "/v0/evi/prompts/af699d45-2985-42cc-91b9-af9e5da3bac5", None, 1)
+ client.empathic_voice.prompts.list_prompt_versions(id="your-prompt-id")
+ verify_request_count(test_id, "GET", "/v0/evi/prompts/your-prompt-id", None, 1)
def test_empathicVoice_prompts_create_prompt_version() -> None:
@@ -33,45 +33,43 @@ def test_empathicVoice_prompts_create_prompt_version() -> None:
test_id = "empathic_voice.prompts.create_prompt_version.0"
client = get_client(test_id)
client.empathic_voice.prompts.create_prompt_version(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
+ id="your-prompt-id",
text="You are an updated version of an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.",
version_description="This is an updated version of the Weather Assistant Prompt.",
)
- verify_request_count(test_id, "POST", "/v0/evi/prompts/af699d45-2985-42cc-91b9-af9e5da3bac5", None, 1)
+ verify_request_count(test_id, "POST", "/v0/evi/prompts/your-prompt-id", None, 1)
def test_empathicVoice_prompts_delete_prompt() -> None:
"""Test delete-prompt endpoint with WireMock"""
test_id = "empathic_voice.prompts.delete_prompt.0"
client = get_client(test_id)
- client.empathic_voice.prompts.delete_prompt(id="af699d45-2985-42cc-91b9-af9e5da3bac5")
- verify_request_count(test_id, "DELETE", "/v0/evi/prompts/af699d45-2985-42cc-91b9-af9e5da3bac5", None, 1)
+ client.empathic_voice.prompts.delete_prompt(id="your-prompt-id")
+ verify_request_count(test_id, "DELETE", "/v0/evi/prompts/your-prompt-id", None, 1)
def test_empathicVoice_prompts_update_prompt_name() -> None:
"""Test update-prompt-name endpoint with WireMock"""
test_id = "empathic_voice.prompts.update_prompt_name.0"
client = get_client(test_id)
- client.empathic_voice.prompts.update_prompt_name(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5", name="Updated Weather Assistant Prompt Name"
- )
- verify_request_count(test_id, "PATCH", "/v0/evi/prompts/af699d45-2985-42cc-91b9-af9e5da3bac5", None, 1)
+ client.empathic_voice.prompts.update_prompt_name(id="your-prompt-id", name="Updated Weather Assistant Prompt Name")
+ verify_request_count(test_id, "PATCH", "/v0/evi/prompts/your-prompt-id", None, 1)
def test_empathicVoice_prompts_get_prompt_version() -> None:
"""Test get-prompt-version endpoint with WireMock"""
test_id = "empathic_voice.prompts.get_prompt_version.0"
client = get_client(test_id)
- client.empathic_voice.prompts.get_prompt_version(id="af699d45-2985-42cc-91b9-af9e5da3bac5", version=0)
- verify_request_count(test_id, "GET", "/v0/evi/prompts/af699d45-2985-42cc-91b9-af9e5da3bac5/version/0", None, 1)
+ client.empathic_voice.prompts.get_prompt_version(id="your-prompt-id", version=0)
+ verify_request_count(test_id, "GET", "/v0/evi/prompts/your-prompt-id/version/0", None, 1)
def test_empathicVoice_prompts_delete_prompt_version() -> None:
"""Test delete-prompt-version endpoint with WireMock"""
test_id = "empathic_voice.prompts.delete_prompt_version.0"
client = get_client(test_id)
- client.empathic_voice.prompts.delete_prompt_version(id="af699d45-2985-42cc-91b9-af9e5da3bac5", version=1)
- verify_request_count(test_id, "DELETE", "/v0/evi/prompts/af699d45-2985-42cc-91b9-af9e5da3bac5/version/1", None, 1)
+ client.empathic_voice.prompts.delete_prompt_version(id="your-prompt-id", version=1)
+ verify_request_count(test_id, "DELETE", "/v0/evi/prompts/your-prompt-id/version/1", None, 1)
def test_empathicVoice_prompts_update_prompt_description() -> None:
@@ -79,8 +77,6 @@ def test_empathicVoice_prompts_update_prompt_description() -> None:
test_id = "empathic_voice.prompts.update_prompt_description.0"
client = get_client(test_id)
client.empathic_voice.prompts.update_prompt_description(
- id="af699d45-2985-42cc-91b9-af9e5da3bac5",
- version=1,
- version_description="This is an updated version_description.",
+ id="your-prompt-id", version=1, version_description="This is an updated version_description."
)
- verify_request_count(test_id, "PATCH", "/v0/evi/prompts/af699d45-2985-42cc-91b9-af9e5da3bac5/version/1", None, 1)
+ verify_request_count(test_id, "PATCH", "/v0/evi/prompts/your-prompt-id/version/1", None, 1)
diff --git a/tests/wire/test_empathicVoice_tools.py b/tests/wire/test_empathicVoice_tools.py
index d96cfc3a..8d4300ab 100644
--- a/tests/wire/test_empathicVoice_tools.py
+++ b/tests/wire/test_empathicVoice_tools.py
@@ -27,8 +27,8 @@ def test_empathicVoice_tools_list_tool_versions() -> None:
"""Test list-tool-versions endpoint with WireMock"""
test_id = "empathic_voice.tools.list_tool_versions.0"
client = get_client(test_id)
- client.empathic_voice.tools.list_tool_versions(id="00183a3f-79ba-413d-9f3b-609864268bea")
- verify_request_count(test_id, "GET", "/v0/evi/tools/00183a3f-79ba-413d-9f3b-609864268bea", None, 1)
+ client.empathic_voice.tools.list_tool_versions(id="your-tool-id")
+ verify_request_count(test_id, "GET", "/v0/evi/tools/your-tool-id", None, 1)
def test_empathicVoice_tools_create_tool_version() -> None:
@@ -36,47 +36,45 @@ def test_empathicVoice_tools_create_tool_version() -> None:
test_id = "empathic_voice.tools.create_tool_version.0"
client = get_client(test_id)
client.empathic_voice.tools.create_tool_version(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
parameters='{ "type": "object", "properties": { "location": { "type": "string", "description": "The city and state, e.g. San Francisco, CA" }, "format": { "type": "string", "enum": ["celsius", "fahrenheit", "kelvin"], "description": "The temperature unit to use. Infer this from the users location." } }, "required": ["location", "format"] }',
version_description="Fetches current weather and uses celsius, fahrenheit, or kelvin based on location of user.",
fallback_content="Unable to fetch current weather.",
description="This tool is for getting the current weather.",
)
- verify_request_count(test_id, "POST", "/v0/evi/tools/00183a3f-79ba-413d-9f3b-609864268bea", None, 1)
+ verify_request_count(test_id, "POST", "/v0/evi/tools/your-tool-id", None, 1)
def test_empathicVoice_tools_delete_tool() -> None:
"""Test delete-tool endpoint with WireMock"""
test_id = "empathic_voice.tools.delete_tool.0"
client = get_client(test_id)
- client.empathic_voice.tools.delete_tool(id="00183a3f-79ba-413d-9f3b-609864268bea")
- verify_request_count(test_id, "DELETE", "/v0/evi/tools/00183a3f-79ba-413d-9f3b-609864268bea", None, 1)
+ client.empathic_voice.tools.delete_tool(id="your-tool-id")
+ verify_request_count(test_id, "DELETE", "/v0/evi/tools/your-tool-id", None, 1)
def test_empathicVoice_tools_update_tool_name() -> None:
"""Test update-tool-name endpoint with WireMock"""
test_id = "empathic_voice.tools.update_tool_name.0"
client = get_client(test_id)
- client.empathic_voice.tools.update_tool_name(
- id="00183a3f-79ba-413d-9f3b-609864268bea", name="get_current_temperature"
- )
- verify_request_count(test_id, "PATCH", "/v0/evi/tools/00183a3f-79ba-413d-9f3b-609864268bea", None, 1)
+ client.empathic_voice.tools.update_tool_name(id="your-tool-id", name="get_current_temperature")
+ verify_request_count(test_id, "PATCH", "/v0/evi/tools/your-tool-id", None, 1)
def test_empathicVoice_tools_get_tool_version() -> None:
"""Test get-tool-version endpoint with WireMock"""
test_id = "empathic_voice.tools.get_tool_version.0"
client = get_client(test_id)
- client.empathic_voice.tools.get_tool_version(id="00183a3f-79ba-413d-9f3b-609864268bea", version=1)
- verify_request_count(test_id, "GET", "/v0/evi/tools/00183a3f-79ba-413d-9f3b-609864268bea/version/1", None, 1)
+ client.empathic_voice.tools.get_tool_version(id="your-tool-id", version=1)
+ verify_request_count(test_id, "GET", "/v0/evi/tools/your-tool-id/version/1", None, 1)
def test_empathicVoice_tools_delete_tool_version() -> None:
"""Test delete-tool-version endpoint with WireMock"""
test_id = "empathic_voice.tools.delete_tool_version.0"
client = get_client(test_id)
- client.empathic_voice.tools.delete_tool_version(id="00183a3f-79ba-413d-9f3b-609864268bea", version=1)
- verify_request_count(test_id, "DELETE", "/v0/evi/tools/00183a3f-79ba-413d-9f3b-609864268bea/version/1", None, 1)
+    client.empathic_voice.tools.delete_tool_version(id="your-tool-id", version=1)
+    verify_request_count(test_id, "DELETE", "/v0/evi/tools/your-tool-id/version/1", None, 1)
def test_empathicVoice_tools_update_tool_description() -> None:
@@ -84,8 +82,8 @@ def test_empathicVoice_tools_update_tool_description() -> None:
test_id = "empathic_voice.tools.update_tool_description.0"
client = get_client(test_id)
client.empathic_voice.tools.update_tool_description(
- id="00183a3f-79ba-413d-9f3b-609864268bea",
+ id="your-tool-id",
version=1,
version_description="Fetches current temperature, precipitation, wind speed, AQI, and other weather conditions. Uses Celsius, Fahrenheit, or kelvin depending on user's region.",
)
- verify_request_count(test_id, "PATCH", "/v0/evi/tools/00183a3f-79ba-413d-9f3b-609864268bea/version/1", None, 1)
+ verify_request_count(test_id, "PATCH", "/v0/evi/tools/your-tool-id/version/1", None, 1)
diff --git a/tests/wire/test_tts.py b/tests/wire/test_tts.py
index d75b4d2f..40f8bf0a 100644
--- a/tests/wire/test_tts.py
+++ b/tests/wire/test_tts.py
@@ -24,7 +24,7 @@ def test_tts_synthesize_file() -> None:
test_id = "tts.synthesize_file.0"
client = get_client(test_id)
for _ in client.tts.synthesize_file(
- context={"generation_id": "09ad914d-8e7f-40f8-a279-e34f07f7dab2"},
+ context={"generation_id": ""},
format={"type": "mp3"},
num_generations=1,
utterances=[
diff --git a/tests/wire/test_tts_voices.py b/tests/wire/test_tts_voices.py
index 5b8b6c45..00b5242a 100644
--- a/tests/wire/test_tts_voices.py
+++ b/tests/wire/test_tts_voices.py
@@ -13,7 +13,7 @@ def test_tts_voices_create() -> None:
"""Test create endpoint with WireMock"""
test_id = "tts.voices.create.0"
client = get_client(test_id)
- client.tts.voices.create(generation_id="795c949a-1510-4a80-9646-7d0863b023ab", name="David Hume")
+ client.tts.voices.create(generation_id="", name="David Hume")
verify_request_count(test_id, "POST", "/v0/tts/voices", None, 1)
diff --git a/wiremock/wiremock-mappings.json b/wiremock/wiremock-mappings.json
index b4ee6ce6..8081e5cc 100644
--- a/wiremock/wiremock-mappings.json
+++ b/wiremock/wiremock-mappings.json
@@ -1 +1 @@
-{"mappings":[{"id":"1d60b8ea-f512-4ce0-92ad-0a086a4717a2","name":"List voices - default","request":{"urlPathTemplate":"/v0/tts/voices","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 10,\n \"total_pages\": 1,\n \"voices_page\": [\n {\n \"id\": \"c42352c0-4566-455d-b180-0f654b65b525\",\n \"name\": \"David Hume\",\n \"provider\": \"CUSTOM_VOICE\"\n },\n {\n \"id\": \"d87352b0-26a3-4b11-081b-d157a5674d19\",\n \"name\": \"Goliath Hume\",\n \"provider\": \"CUSTOM_VOICE\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"1d60b8ea-f512-4ce0-92ad-0a086a4717a2","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"2c434990-d686-4fec-940d-7b86327bf9d5","name":"Create voice - default","request":{"urlPathTemplate":"/v0/tts/voices","method":"POST"},"response":{"status":200,"body":"{\n \"id\": \"c42352c0-4566-455d-b180-0f654b65b525\",\n \"name\": \"David Hume\",\n \"provider\": \"CUSTOM_VOICE\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"2c434990-d686-4fec-940d-7b86327bf9d5","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"69163bef-50c2-4c89-b4f9-70f5a04bfec8","name":"Delete voice - default","request":{"urlPathTemplate":"/v0/tts/voices","method":"DELETE"},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"69163bef-50c2-4c89-b4f9-70f5a04bfec8","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"1a9089ce-a462-43bb-afa5-ed028dd296e5","name":"Text-to-Speech (Json) - default","request":{"urlPathTemplate":"/v0/tts","method":"POST"},"response":{"status":200,"body":"{\n \"generations\": [\n {\n \"audio\": \"//PExAA0DDYRvkpNfhv3JI5JZ...etc.\",\n \"duration\": 7.44225,\n \"encoding\": {\n \"format\": \"mp3\",\n \"sample_rate\": 48000\n },\n 
\"file_size\": 120192,\n \"generation_id\": \"795c949a-1510-4a80-9646-7d0863b023ab\",\n \"snippets\": [\n [\n {\n \"audio\": \"//PExAA0DDYRvkpNfhv3JI5JZ...etc.\",\n \"generation_id\": \"795c949a-1510-4a80-9646-7d0863b023ab\",\n \"id\": \"37b1b1b1-1b1b-1b1b-1b1b-1b1b1b1b1b1b\",\n \"text\": \"Beauty is no quality in things themselves: It exists merely in the mind which contemplates them.\",\n \"utterance_index\": 0,\n \"timestamps\": []\n }\n ]\n ]\n }\n ],\n \"request_id\": \"66e01f90-4501-4aa0-bbaf-74f45dc15aa725906\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"1a9089ce-a462-43bb-afa5-ed028dd296e5","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"449bb149-6027-4735-a265-0a0a5bc0d0ef","name":"Text-to-Speech (File) - default","request":{"urlPathTemplate":"/v0/tts/file","method":"POST"},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"449bb149-6027-4735-a265-0a0a5bc0d0ef","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"fd6bbe04-a38a-4d6e-bc3a-f8cf25725dbe","name":"Text-to-Speech (Streamed File) - default","request":{"urlPathTemplate":"/v0/tts/stream/file","method":"POST"},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"fd6bbe04-a38a-4d6e-bc3a-f8cf25725dbe","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"8e8e8262-38fd-4f81-836b-c9d7ee07cd84","name":"Text-to-Speech (Streamed JSON) - 
default","request":{"urlPathTemplate":"/v0/tts/stream/json","method":"POST"},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"8e8e8262-38fd-4f81-836b-c9d7ee07cd84","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"f75e0a57-c1d9-44df-80c7-c8485a7a76d6","name":"Voice Conversion (Streamed JSON) - default","request":{"urlPathTemplate":"/v0/tts/voice_conversion/json","method":"POST"},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"f75e0a57-c1d9-44df-80c7-c8485a7a76d6","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"e0fc3f32-35c4-4c49-89f2-eaa4e21b9444","name":"Send Message - default","request":{"urlPathTemplate":"/v0/evi/chat/{chat_id}/send","method":"POST","pathParameters":{"chat_id":{"equalTo":"chat_id"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"e0fc3f32-35c4-4c49-89f2-eaa4e21b9444","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"7e5b16a7-b7b9-4e7d-b0a9-61b1ac4b5f7d","name":"List chat_groups - default","request":{"urlPathTemplate":"/v0/evi/chat_groups","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 1,\n \"total_pages\": 1,\n \"pagination_direction\": \"ASC\",\n \"chat_groups_page\": [\n {\n \"id\": \"697056f0-6c7e-487d-9bd8-9c19df79f05f\",\n \"first_start_timestamp\": 1721844196397,\n \"most_recent_start_timestamp\": 1721861821717,\n \"active\": false,\n \"most_recent_chat_id\": \"dfdbdd4d-0ddf-418b-8fc4-80a266579d36\",\n \"num_chats\": 5\n }\n 
]\n}","headers":{"Content-Type":"application/json"}},"uuid":"7e5b16a7-b7b9-4e7d-b0a9-61b1ac4b5f7d","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"52c3c012-9681-44fd-b0dd-d644aac44f8c","name":"Get chat_group - default","request":{"urlPathTemplate":"/v0/evi/chat_groups/{id}","method":"GET","pathParameters":{"id":{"equalTo":"697056f0-6c7e-487d-9bd8-9c19df79f05f"}}},"response":{"status":200,"body":"{\n \"id\": \"369846cf-6ad5-404d-905e-a8acb5cdfc78\",\n \"first_start_timestamp\": 1712334213647,\n \"most_recent_start_timestamp\": 1712334213647,\n \"num_chats\": 1,\n \"page_number\": 0,\n \"page_size\": 1,\n \"total_pages\": 1,\n \"pagination_direction\": \"ASC\",\n \"chats_page\": [\n {\n \"id\": \"6375d4f8-cd3e-4d6b-b13b-ace66b7c8aaa\",\n \"chat_group_id\": \"369846cf-6ad5-404d-905e-a8acb5cdfc78\",\n \"status\": \"USER_ENDED\",\n \"start_timestamp\": 1712334213647,\n \"end_timestamp\": 1712334332571,\n \"event_count\": 0,\n \"metadata\": null,\n \"config\": null\n }\n ],\n \"active\": false\n}","headers":{"Content-Type":"application/json"}},"uuid":"52c3c012-9681-44fd-b0dd-d644aac44f8c","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"ef5bd433-148e-481b-b653-7b4676a3fbbb","name":"Get chat group audio - default","request":{"urlPathTemplate":"/v0/evi/chat_groups/{id}/audio","method":"GET","pathParameters":{"id":{"equalTo":"369846cf-6ad5-404d-905e-a8acb5cdfc78"}}},"response":{"status":200,"body":"{\n \"id\": \"369846cf-6ad5-404d-905e-a8acb5cdfc78\",\n \"user_id\": \"e6235940-cfda-3988-9147-ff531627cf42\",\n \"num_chats\": 1,\n \"page_number\": 0,\n \"page_size\": 10,\n \"total_pages\": 1,\n \"pagination_direction\": \"ASC\",\n \"audio_reconstructions_page\": [\n {\n \"id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"user_id\": \"e6235940-cfda-3988-9147-ff531627cf42\",\n \"status\": \"COMPLETE\",\n 
\"filename\": \"e6235940-cfda-3988-9147-ff531627cf42/470a49f6-1dec-4afe-8b61-035d3b2d63b0/reconstructed_audio.mp4\",\n \"modified_at\": 1729875432555,\n \"signed_audio_url\": \"https://storage.googleapis.com/...etc.\",\n \"signed_url_expiration_timestamp_millis\": 1730232816964\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"ef5bd433-148e-481b-b653-7b4676a3fbbb","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"63f657bf-ebac-4bc4-bbae-5a46d2babec4","name":"List chat events from a specific chat_group - default","request":{"urlPathTemplate":"/v0/evi/chat_groups/{id}/events","method":"GET","pathParameters":{"id":{"equalTo":"697056f0-6c7e-487d-9bd8-9c19df79f05f"}}},"response":{"status":200,"body":"{\n \"id\": \"697056f0-6c7e-487d-9bd8-9c19df79f05f\",\n \"page_number\": 0,\n \"page_size\": 3,\n \"total_pages\": 1,\n \"pagination_direction\": \"ASC\",\n \"events_page\": [\n {\n \"id\": \"5d44bdbb-49a3-40fb-871d-32bf7e76efe7\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244940762,\n \"role\": \"SYSTEM\",\n \"type\": \"SYSTEM_PROMPT\",\n \"message_text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\",\n \"emotion_features\": \"\",\n \"metadata\": \"\"\n },\n {\n \"id\": \"5976ddf6-d093-4bb9-ba60-8f6c25832dde\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244956278,\n \"role\": \"USER\",\n \"type\": \"USER_MESSAGE\",\n \"message_text\": \"Hello.\",\n \"emotion_features\": \"{\\\"Admiration\\\": 0.09906005859375, \\\"Adoration\\\": 0.12213134765625, \\\"Aesthetic Appreciation\\\": 0.05035400390625, \\\"Amusement\\\": 0.16552734375, \\\"Anger\\\": 0.0037384033203125, \\\"Anxiety\\\": 0.010101318359375, \\\"Awe\\\": 0.058197021484375, \\\"Awkwardness\\\": 0.10552978515625, \\\"Boredom\\\": 0.1141357421875, \\\"Calmness\\\": 0.115234375, \\\"Concentration\\\": 0.00444793701171875, \\\"Confusion\\\": 0.0343017578125, \\\"Contemplation\\\": 0.00812530517578125, \\\"Contempt\\\": 0.009002685546875, \\\"Contentment\\\": 0.087158203125, \\\"Craving\\\": 0.00818634033203125, \\\"Desire\\\": 0.018310546875, \\\"Determination\\\": 0.003238677978515625, \\\"Disappointment\\\": 0.024169921875, \\\"Disgust\\\": 0.00702667236328125, \\\"Distress\\\": 0.00936126708984375, \\\"Doubt\\\": 0.00632476806640625, \\\"Ecstasy\\\": 0.0293731689453125, \\\"Embarrassment\\\": 0.01800537109375, \\\"Empathic Pain\\\": 0.0088348388671875, \\\"Entrancement\\\": 0.013397216796875, \\\"Envy\\\": 0.02557373046875, \\\"Excitement\\\": 0.12109375, \\\"Fear\\\": 0.004413604736328125, \\\"Guilt\\\": 0.016571044921875, \\\"Horror\\\": 0.00274658203125, \\\"Interest\\\": 0.2142333984375, \\\"Joy\\\": 0.29638671875, \\\"Love\\\": 0.16015625, \\\"Nostalgia\\\": 0.007843017578125, \\\"Pain\\\": 0.007160186767578125, \\\"Pride\\\": 0.00508880615234375, \\\"Realization\\\": 0.054229736328125, \\\"Relief\\\": 0.048736572265625, \\\"Romance\\\": 0.026397705078125, \\\"Sadness\\\": 0.0265350341796875, \\\"Satisfaction\\\": 0.051361083984375, \\\"Shame\\\": 0.00974273681640625, \\\"Surprise (negative)\\\": 
0.0218963623046875, \\\"Surprise (positive)\\\": 0.216064453125, \\\"Sympathy\\\": 0.021728515625, \\\"Tiredness\\\": 0.0173797607421875, \\\"Triumph\\\": 0.004520416259765625}\",\n \"metadata\": \"{\\\"segments\\\": [{\\\"content\\\": \\\"Hello.\\\", \\\"embedding\\\": [0.6181640625, 0.1763916015625, -30.921875, 1.2705078125, 0.927734375, 0.63720703125, 2.865234375, 0.1080322265625, 0.2978515625, 1.0107421875, 1.34375, 0.74560546875, 0.416259765625, 0.99462890625, -0.333740234375, 0.361083984375, -1.388671875, 1.0107421875, 1.3173828125, 0.55615234375, 0.541015625, -0.1837158203125, 1.697265625, 0.228515625, 2.087890625, -0.311767578125, 0.053680419921875, 1.3349609375, 0.95068359375, 0.00441741943359375, 0.705078125, 1.8916015625, -0.939453125, 0.93701171875, -0.28955078125, 1.513671875, 0.5595703125, 1.0126953125, -0.1624755859375, 1.4072265625, -0.28857421875, -0.4560546875, -0.1500244140625, -0.1102294921875, -0.222412109375, 0.8779296875, 1.275390625, 1.6689453125, 0.80712890625, -0.34814453125, -0.325439453125, 0.412841796875, 0.81689453125, 0.55126953125, 1.671875, 0.6611328125, 0.7451171875, 1.50390625, 1.0224609375, -1.671875, 0.7373046875, 2.1328125, 2.166015625, 0.41015625, -0.127685546875, 1.9345703125, -4.2734375, 0.332275390625, 0.26171875, 0.76708984375, 0.2685546875, 0.468017578125, 1.208984375, -1.517578125, 1.083984375, 0.84814453125, 1.0244140625, -0.0072174072265625, 1.34375, 1.0712890625, 1.517578125, -0.52001953125, 0.59228515625, 0.8154296875, -0.951171875, -0.07757568359375, 1.3330078125, 1.125, 0.61181640625, 1.494140625, 0.357421875, 1.1796875, 1.482421875, 0.8046875, 0.1536865234375, 1.8076171875, 0.68115234375, -15.171875, 1.2294921875, 0.319091796875, 0.499755859375, 1.5771484375, 0.94677734375, -0.2490234375, 0.88525390625, 3.47265625, 0.75927734375, 0.71044921875, 1.2333984375, 1.4169921875, -0.56640625, -1.8095703125, 1.37109375, 0.428955078125, 1.89453125, -0.39013671875, 0.1734619140625, 1.5595703125, -1.2294921875, 2.552734375, 
0.58349609375, 0.2156982421875, -0.00984954833984375, -0.6865234375, -0.0272979736328125, -0.2264404296875, 2.853515625, 1.3896484375, 0.52978515625, 0.783203125, 3.0390625, 0.75537109375, 0.219970703125, 0.384521484375, 0.385986328125, 2.0546875, -0.10443115234375, 1.5146484375, 1.4296875, 1.9716796875, 1.1318359375, 0.31591796875, 0.338623046875, 1.654296875, -0.88037109375, -0.21484375, 1.45703125, 1.0380859375, -0.52294921875, -0.47802734375, 0.1650390625, 1.2392578125, -1.138671875, 0.56787109375, 1.318359375, 0.4287109375, 0.1981201171875, 2.4375, 0.281005859375, 0.89404296875, -0.1552734375, 0.6474609375, -0.08331298828125, 0.00740814208984375, -0.045501708984375, -0.578125, 2.02734375, 0.59228515625, 0.35693359375, 1.2919921875, 1.22265625, 1.0537109375, 0.145263671875, 1.05859375, -0.369140625, 0.207275390625, 0.78857421875, 0.599609375, 0.99072265625, 0.24462890625, 1.26953125, 0.08404541015625, 1.349609375, 0.73291015625, 1.3212890625, 0.388916015625, 1.0869140625, 0.9931640625, -1.5673828125, 0.0462646484375, 0.650390625, 0.253662109375, 0.58251953125, 1.8134765625, 0.8642578125, 2.591796875, 0.7314453125, 0.85986328125, 0.5615234375, 0.9296875, 0.04144287109375, 1.66015625, 1.99609375, 1.171875, 1.181640625, 1.5126953125, 0.0224456787109375, 0.58349609375, -1.4931640625, 0.81884765625, 0.732421875, -0.6455078125, -0.62451171875, 1.7802734375, 0.01526641845703125, -0.423095703125, 0.461669921875, 4.87890625, 1.2392578125, -0.6953125, 0.6689453125, 0.62451171875, -1.521484375, 1.7685546875, 0.810546875, 0.65478515625, 0.26123046875, 1.6396484375, 0.87548828125, 1.7353515625, 2.046875, 1.5634765625, 0.69384765625, 1.375, 0.8916015625, 1.0107421875, 0.1304931640625, 2.009765625, 0.06402587890625, -0.08428955078125, 0.04351806640625, -1.7529296875, 2.02734375, 3.521484375, 0.404541015625, 1.6337890625, -0.276611328125, 0.8837890625, -0.1287841796875, 0.91064453125, 0.8193359375, 0.701171875, 0.036529541015625, 1.26171875, 1.0478515625, -0.1422119140625, 
1.0634765625, 0.61083984375, 1.3505859375, 1.208984375, 0.57275390625, 1.3623046875, 2.267578125, 0.484375, 0.9150390625, 0.56787109375, -0.70068359375, 0.27587890625, -0.70654296875, 0.8466796875, 0.57568359375, 1.6162109375, 0.87939453125, 2.248046875, -0.5458984375, 1.7744140625, 1.328125, 1.232421875, 0.6806640625, 0.9365234375, 1.052734375, -1.08984375, 1.8330078125, -0.4033203125, 1.0673828125, 0.297607421875, 1.5703125, 1.67578125, 1.34765625, 2.8203125, 2.025390625, -0.48583984375, 0.7626953125, 0.01007843017578125, 1.435546875, 0.007205963134765625, 0.05157470703125, -0.9853515625, 0.26708984375, 1.16796875, 1.2041015625, 1.99609375, -0.07916259765625, 1.244140625, -0.32080078125, 0.6748046875, 0.419921875, 1.3212890625, 1.291015625, 0.599609375, 0.0550537109375, 0.9599609375, 0.93505859375, 0.111083984375, 1.302734375, 0.0833740234375, 2.244140625, 1.25390625, 1.6015625, 0.58349609375, 1.7568359375, -0.263427734375, -0.019866943359375, -0.24658203125, -0.1871337890625, 0.927734375, 0.62255859375, 0.275146484375, 0.79541015625, 1.1796875, 1.1767578125, -0.26123046875, -0.268310546875, 1.8994140625, 1.318359375, 2.1875, 0.2469482421875, 1.41015625, 0.03973388671875, 1.2685546875, 1.1025390625, 0.9560546875, 0.865234375, -1.92578125, 1.154296875, 0.389892578125, 1.130859375, 0.95947265625, 0.72314453125, 2.244140625, 0.048553466796875, 0.626953125, 0.42919921875, 0.82275390625, 0.311767578125, -0.320556640625, 0.01041412353515625, 0.1483154296875, 0.10809326171875, -0.3173828125, 1.1337890625, -0.8642578125, 1.4033203125, 0.048828125, 1.1787109375, 0.98779296875, 1.818359375, 1.1552734375, 0.6015625, 1.2392578125, -1.2685546875, 0.39208984375, 0.83251953125, 0.224365234375, 0.0019989013671875, 0.87548828125, 1.6572265625, 1.107421875, 0.434814453125, 1.8251953125, 0.442626953125, 1.2587890625, 0.09320068359375, -0.896484375, 1.8017578125, 1.451171875, -0.0755615234375, 0.6083984375, 2.06640625, 0.673828125, -0.33740234375, 0.192138671875, 0.21435546875, 
0.80224609375, -1.490234375, 0.9501953125, 0.86083984375, -0.40283203125, 4.109375, 2.533203125, 1.2529296875, 0.8271484375, 0.225830078125, 1.0478515625, -1.9755859375, 0.841796875, 0.392822265625, 0.525390625, 0.33935546875, -0.79443359375, 0.71630859375, 0.97998046875, -0.175537109375, 0.97705078125, 1.705078125, 0.29638671875, 0.68359375, 0.54150390625, 0.435791015625, 0.99755859375, -0.369140625, 1.009765625, -0.140380859375, 0.426513671875, 0.189697265625, 1.8193359375, 1.1201171875, -0.5009765625, -0.331298828125, 0.759765625, -0.09442138671875, 0.74609375, -1.947265625, 1.3544921875, -3.935546875, 2.544921875, 1.359375, 0.1363525390625, 0.79296875, 0.79931640625, -0.3466796875, 1.1396484375, -0.33447265625, 2.0078125, -0.241455078125, 0.6318359375, 0.365234375, 0.296142578125, 0.830078125, 1.0458984375, 0.5830078125, 0.61572265625, 14.0703125, -2.0078125, -0.381591796875, 1.228515625, 0.08282470703125, -0.67822265625, -0.04339599609375, 0.397216796875, 0.1656494140625, 0.137451171875, 0.244873046875, 1.1611328125, -1.3818359375, 0.8447265625, 1.171875, 0.36328125, 0.252685546875, 0.1197509765625, 0.232177734375, -0.020172119140625, 0.64404296875, -0.01100921630859375, -1.9267578125, 0.222412109375, 0.56005859375, 1.3046875, 1.1630859375, 1.197265625, 1.02734375, 1.6806640625, -0.043731689453125, 1.4697265625, 0.81201171875, 1.5390625, 1.240234375, -0.7353515625, 1.828125, 1.115234375, 1.931640625, -0.517578125, 0.77880859375, 1.0546875, 0.95361328125, 3.42578125, 0.0160369873046875, 0.875, 0.56005859375, 1.2421875, 1.986328125, 1.4814453125, 0.0948486328125, 1.115234375, 0.00665283203125, 2.09375, 0.3544921875, -0.52783203125, 1.2099609375, 0.45068359375, 0.65625, 0.1112060546875, 1.0751953125, -0.9521484375, -0.30029296875, 1.4462890625, 2.046875, 3.212890625, 1.68359375, 1.07421875, -0.5263671875, 0.74560546875, 1.37890625, 0.15283203125, 0.2440185546875, 0.62646484375, -0.1280517578125, 0.7646484375, -0.515625, -0.35693359375, 1.2958984375, 
0.96923828125, 0.58935546875, 1.3701171875, 1.0673828125, 0.2337646484375, 0.93115234375, 0.66357421875, 6.0, 1.1025390625, -0.51708984375, -0.38330078125, 0.7197265625, 0.246826171875, -0.45166015625, 1.9521484375, 0.5546875, 0.08807373046875, 0.18505859375, 0.8857421875, -0.57177734375, 0.251708984375, 0.234375, 2.57421875, 0.9599609375, 0.5029296875, 0.10382080078125, 0.08331298828125, 0.66748046875, -0.349609375, 1.287109375, 0.259765625, 2.015625, 2.828125, -0.3095703125, -0.164306640625, -0.3408203125, 0.486572265625, 0.8466796875, 1.9130859375, 0.09088134765625, 0.66552734375, 0.00972747802734375, -0.83154296875, 1.755859375, 0.654296875, 0.173828125, 0.27587890625, -0.47607421875, -0.264404296875, 0.7529296875, 0.6533203125, 0.7275390625, 0.499755859375, 0.833984375, -0.44775390625, -0.05078125, -0.454833984375, 0.75439453125, 0.68505859375, 0.210693359375, -0.283935546875, -0.53564453125, 0.96826171875, 0.861328125, -3.33984375, -0.26171875, 0.77734375, 0.26513671875, -0.14111328125, -0.042236328125, -0.84814453125, 0.2137451171875, 0.94921875, 0.65185546875, -0.5380859375, 0.1529541015625, -0.360595703125, -0.0333251953125, -0.69189453125, 0.8974609375, 0.7109375, 0.81494140625, -0.259521484375, 1.1904296875, 0.62158203125, 1.345703125, 0.89404296875, 0.70556640625, 1.0673828125, 1.392578125, 0.5068359375, 0.962890625, 0.736328125, 1.55078125, 0.50390625, -0.398681640625, 2.361328125, 0.345947265625, -0.61962890625, 0.330078125, 0.75439453125, -0.673828125, -0.2379150390625, 1.5673828125, 1.369140625, 0.1119384765625, -0.1834716796875, 1.4599609375, -0.77587890625, 0.5556640625, 0.09954833984375, 0.0285186767578125, 0.58935546875, -0.501953125, 0.212890625, 0.02679443359375, 0.1715087890625, 0.03466796875, -0.564453125, 2.029296875, 2.45703125, -0.72216796875, 2.138671875, 0.50830078125, -0.09356689453125, 0.230224609375, 1.6943359375, 1.5126953125, 0.39453125, 0.411376953125, 1.07421875, -0.8046875, 0.51416015625, 0.2271728515625, -0.283447265625, 
0.38427734375, 0.73388671875, 0.6962890625, 1.4990234375, 0.02813720703125, 0.40478515625, 1.2451171875, 1.1162109375, -5.5703125, 0.76171875, 0.322021484375, 1.0361328125, 1.197265625, 0.1163330078125, 0.2425537109375, 1.5595703125, 1.5791015625, -0.0921630859375, 0.484619140625, 1.9052734375, 5.31640625, 1.6337890625, 0.95947265625, -0.1751708984375, 0.466552734375, 0.8330078125, 1.03125, 0.2044677734375, 0.31298828125, -1.1220703125, 0.5517578125, 0.93505859375, 0.45166015625, 1.951171875, 0.65478515625, 1.30859375, 1.0859375, 0.56494140625, 2.322265625, 0.242919921875, 1.81640625, -0.469970703125, -0.841796875, 0.90869140625, 1.5361328125, 0.923828125, 1.0595703125, 0.356689453125, -0.46142578125, 2.134765625, 1.3037109375, -0.32373046875, -9.2265625, 0.4521484375, 0.88037109375, -0.53955078125, 0.96484375, 0.7705078125, 0.84521484375, 1.580078125, -0.1448974609375, 0.7607421875, 1.0166015625, -0.086669921875, 1.611328125, 0.05938720703125, 0.5078125, 0.8427734375, 2.431640625, 0.66357421875, 3.203125, 0.132080078125, 0.461181640625, 0.779296875, 1.9482421875, 1.8720703125, 0.845703125, -1.3837890625, -0.138916015625, 0.35546875, 0.2457275390625, 0.75341796875, 1.828125, 1.4169921875, 0.60791015625, 1.0068359375, 1.109375, 0.484130859375, -0.302001953125, 0.4951171875, 0.802734375, 1.9482421875, 0.916015625, 0.1646728515625, 2.599609375, 1.7177734375, -0.2374267578125, 0.98046875, 0.39306640625, -1.1396484375, 1.6533203125, 0.375244140625], \\\"scores\\\": [0.09906005859375, 0.12213134765625, 0.05035400390625, 0.16552734375, 0.0037384033203125, 0.010101318359375, 0.058197021484375, 0.10552978515625, 0.1141357421875, 0.115234375, 0.00444793701171875, 0.00812530517578125, 0.0343017578125, 0.009002685546875, 0.087158203125, 0.00818634033203125, 0.003238677978515625, 0.024169921875, 0.00702667236328125, 0.00936126708984375, 0.00632476806640625, 0.0293731689453125, 0.01800537109375, 0.0088348388671875, 0.013397216796875, 0.02557373046875, 0.12109375, 
0.004413604736328125, 0.016571044921875, 0.00274658203125, 0.2142333984375, 0.29638671875, 0.16015625, 0.007843017578125, 0.007160186767578125, 0.00508880615234375, 0.054229736328125, 0.048736572265625, 0.026397705078125, 0.0265350341796875, 0.051361083984375, 0.018310546875, 0.00974273681640625, 0.0218963623046875, 0.216064453125, 0.021728515625, 0.0173797607421875, 0.004520416259765625], \\\"stoks\\\": [52, 52, 52, 52, 52, 41, 41, 374, 303, 303, 303, 427], \\\"time\\\": {\\\"begin_ms\\\": 640, \\\"end_ms\\\": 1140}}]}\"\n },\n {\n \"id\": \"7645a0d1-2e64-410d-83a8-b96040432e9a\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244957031,\n \"role\": \"AGENT\",\n \"type\": \"AGENT_MESSAGE\",\n \"message_text\": \"Hello!\",\n \"emotion_features\": \"{\\\"Admiration\\\": 0.044921875, \\\"Adoration\\\": 0.0253753662109375, \\\"Aesthetic Appreciation\\\": 0.03265380859375, \\\"Amusement\\\": 0.118408203125, \\\"Anger\\\": 0.06719970703125, \\\"Anxiety\\\": 0.0411376953125, \\\"Awe\\\": 0.03802490234375, \\\"Awkwardness\\\": 0.056549072265625, \\\"Boredom\\\": 0.04217529296875, \\\"Calmness\\\": 0.08709716796875, \\\"Concentration\\\": 0.070556640625, \\\"Confusion\\\": 0.06964111328125, \\\"Contemplation\\\": 0.0343017578125, \\\"Contempt\\\": 0.037689208984375, \\\"Contentment\\\": 0.059417724609375, \\\"Craving\\\": 0.01132965087890625, \\\"Desire\\\": 0.01406097412109375, \\\"Determination\\\": 0.1143798828125, \\\"Disappointment\\\": 0.051177978515625, \\\"Disgust\\\": 0.028594970703125, \\\"Distress\\\": 0.054901123046875, \\\"Doubt\\\": 0.04638671875, \\\"Ecstasy\\\": 0.0258026123046875, \\\"Embarrassment\\\": 0.0222015380859375, \\\"Empathic Pain\\\": 0.015777587890625, \\\"Entrancement\\\": 0.0160980224609375, \\\"Envy\\\": 0.0163421630859375, \\\"Excitement\\\": 0.129638671875, \\\"Fear\\\": 0.03125, \\\"Guilt\\\": 0.01483917236328125, \\\"Horror\\\": 0.0194549560546875, \\\"Interest\\\": 0.1341552734375, \\\"Joy\\\": 
0.0738525390625, \\\"Love\\\": 0.0216522216796875, \\\"Nostalgia\\\": 0.0210418701171875, \\\"Pain\\\": 0.020721435546875, \\\"Pride\\\": 0.05499267578125, \\\"Realization\\\": 0.0728759765625, \\\"Relief\\\": 0.04052734375, \\\"Romance\\\": 0.0129241943359375, \\\"Sadness\\\": 0.0254669189453125, \\\"Satisfaction\\\": 0.07159423828125, \\\"Shame\\\": 0.01495361328125, \\\"Surprise (negative)\\\": 0.05560302734375, \\\"Surprise (positive)\\\": 0.07965087890625, \\\"Sympathy\\\": 0.022247314453125, \\\"Tiredness\\\": 0.0194549560546875, \\\"Triumph\\\": 0.04107666015625}\",\n \"metadata\": \"\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"63f657bf-ebac-4bc4-bbae-5a46d2babec4","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"741b2853-034c-43df-9eb0-7e4ff5d57dec","name":"List chats - default","request":{"urlPathTemplate":"/v0/evi/chats","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 1,\n \"total_pages\": 1,\n \"pagination_direction\": \"ASC\",\n \"chats_page\": [\n {\n \"id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"chat_group_id\": \"9fc18597-3567-42d5-94d6-935bde84bf2f\",\n \"status\": \"USER_ENDED\",\n \"start_timestamp\": 1716244940648,\n \"end_timestamp\": 1716244958546,\n \"event_count\": 3,\n \"metadata\": \"\",\n \"config\": {\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 0\n }\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"741b2853-034c-43df-9eb0-7e4ff5d57dec","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"333ce95c-76c7-4621-aa72-bb0ed90fcf50","name":"List chat events - default","request":{"urlPathTemplate":"/v0/evi/chats/{id}","method":"GET","pathParameters":{"id":{"equalTo":"470a49f6-1dec-4afe-8b61-035d3b2d63b0"}}},"response":{"status":200,"body":"{\n \"id\": 
\"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"chat_group_id\": \"9fc18597-3567-42d5-94d6-935bde84bf2f\",\n \"status\": \"USER_ENDED\",\n \"start_timestamp\": 1716244940648,\n \"pagination_direction\": \"ASC\",\n \"events_page\": [\n {\n \"id\": \"5d44bdbb-49a3-40fb-871d-32bf7e76efe7\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244940762,\n \"role\": \"SYSTEM\",\n \"type\": \"SYSTEM_PROMPT\",\n \"message_text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\",\n \"emotion_features\": \"\",\n \"metadata\": \"\"\n },\n {\n \"id\": \"5976ddf6-d093-4bb9-ba60-8f6c25832dde\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244956278,\n \"role\": \"USER\",\n \"type\": \"USER_MESSAGE\",\n \"message_text\": \"Hello.\",\n \"emotion_features\": \"{\\\"Admiration\\\": 0.09906005859375, \\\"Adoration\\\": 0.12213134765625, \\\"Aesthetic Appreciation\\\": 0.05035400390625, \\\"Amusement\\\": 0.16552734375, \\\"Anger\\\": 0.0037384033203125, \\\"Anxiety\\\": 0.010101318359375, \\\"Awe\\\": 0.058197021484375, \\\"Awkwardness\\\": 0.10552978515625, \\\"Boredom\\\": 0.1141357421875, \\\"Calmness\\\": 0.115234375, \\\"Concentration\\\": 0.00444793701171875, \\\"Confusion\\\": 0.0343017578125, \\\"Contemplation\\\": 0.00812530517578125, \\\"Contempt\\\": 0.009002685546875, \\\"Contentment\\\": 0.087158203125, \\\"Craving\\\": 0.00818634033203125, \\\"Desire\\\": 0.018310546875, \\\"Determination\\\": 0.003238677978515625, \\\"Disappointment\\\": 0.024169921875, \\\"Disgust\\\": 0.00702667236328125, \\\"Distress\\\": 0.00936126708984375, \\\"Doubt\\\": 0.00632476806640625, \\\"Ecstasy\\\": 0.0293731689453125, \\\"Embarrassment\\\": 0.01800537109375, 
\\\"Empathic Pain\\\": 0.0088348388671875, \\\"Entrancement\\\": 0.013397216796875, \\\"Envy\\\": 0.02557373046875, \\\"Excitement\\\": 0.12109375, \\\"Fear\\\": 0.004413604736328125, \\\"Guilt\\\": 0.016571044921875, \\\"Horror\\\": 0.00274658203125, \\\"Interest\\\": 0.2142333984375, \\\"Joy\\\": 0.29638671875, \\\"Love\\\": 0.16015625, \\\"Nostalgia\\\": 0.007843017578125, \\\"Pain\\\": 0.007160186767578125, \\\"Pride\\\": 0.00508880615234375, \\\"Realization\\\": 0.054229736328125, \\\"Relief\\\": 0.048736572265625, \\\"Romance\\\": 0.026397705078125, \\\"Sadness\\\": 0.0265350341796875, \\\"Satisfaction\\\": 0.051361083984375, \\\"Shame\\\": 0.00974273681640625, \\\"Surprise (negative)\\\": 0.0218963623046875, \\\"Surprise (positive)\\\": 0.216064453125, \\\"Sympathy\\\": 0.021728515625, \\\"Tiredness\\\": 0.0173797607421875, \\\"Triumph\\\": 0.004520416259765625}\",\n \"metadata\": \"{\\\"segments\\\": [{\\\"content\\\": \\\"Hello.\\\", \\\"embedding\\\": [0.6181640625, 0.1763916015625, -30.921875, 1.2705078125, 0.927734375, 0.63720703125, 2.865234375, 0.1080322265625, 0.2978515625, 1.0107421875, 1.34375, 0.74560546875, 0.416259765625, 0.99462890625, -0.333740234375, 0.361083984375, -1.388671875, 1.0107421875, 1.3173828125, 0.55615234375, 0.541015625, -0.1837158203125, 1.697265625, 0.228515625, 2.087890625, -0.311767578125, 0.053680419921875, 1.3349609375, 0.95068359375, 0.00441741943359375, 0.705078125, 1.8916015625, -0.939453125, 0.93701171875, -0.28955078125, 1.513671875, 0.5595703125, 1.0126953125, -0.1624755859375, 1.4072265625, -0.28857421875, -0.4560546875, -0.1500244140625, -0.1102294921875, -0.222412109375, 0.8779296875, 1.275390625, 1.6689453125, 0.80712890625, -0.34814453125, -0.325439453125, 0.412841796875, 0.81689453125, 0.55126953125, 1.671875, 0.6611328125, 0.7451171875, 1.50390625, 1.0224609375, -1.671875, 0.7373046875, 2.1328125, 2.166015625, 0.41015625, -0.127685546875, 1.9345703125, -4.2734375, 0.332275390625, 0.26171875, 0.76708984375, 
0.2685546875, 0.468017578125, 1.208984375, -1.517578125, 1.083984375, 0.84814453125, 1.0244140625, -0.0072174072265625, 1.34375, 1.0712890625, 1.517578125, -0.52001953125, 0.59228515625, 0.8154296875, -0.951171875, -0.07757568359375, 1.3330078125, 1.125, 0.61181640625, 1.494140625, 0.357421875, 1.1796875, 1.482421875, 0.8046875, 0.1536865234375, 1.8076171875, 0.68115234375, -15.171875, 1.2294921875, 0.319091796875, 0.499755859375, 1.5771484375, 0.94677734375, -0.2490234375, 0.88525390625, 3.47265625, 0.75927734375, 0.71044921875, 1.2333984375, 1.4169921875, -0.56640625, -1.8095703125, 1.37109375, 0.428955078125, 1.89453125, -0.39013671875, 0.1734619140625, 1.5595703125, -1.2294921875, 2.552734375, 0.58349609375, 0.2156982421875, -0.00984954833984375, -0.6865234375, -0.0272979736328125, -0.2264404296875, 2.853515625, 1.3896484375, 0.52978515625, 0.783203125, 3.0390625, 0.75537109375, 0.219970703125, 0.384521484375, 0.385986328125, 2.0546875, -0.10443115234375, 1.5146484375, 1.4296875, 1.9716796875, 1.1318359375, 0.31591796875, 0.338623046875, 1.654296875, -0.88037109375, -0.21484375, 1.45703125, 1.0380859375, -0.52294921875, -0.47802734375, 0.1650390625, 1.2392578125, -1.138671875, 0.56787109375, 1.318359375, 0.4287109375, 0.1981201171875, 2.4375, 0.281005859375, 0.89404296875, -0.1552734375, 0.6474609375, -0.08331298828125, 0.00740814208984375, -0.045501708984375, -0.578125, 2.02734375, 0.59228515625, 0.35693359375, 1.2919921875, 1.22265625, 1.0537109375, 0.145263671875, 1.05859375, -0.369140625, 0.207275390625, 0.78857421875, 0.599609375, 0.99072265625, 0.24462890625, 1.26953125, 0.08404541015625, 1.349609375, 0.73291015625, 1.3212890625, 0.388916015625, 1.0869140625, 0.9931640625, -1.5673828125, 0.0462646484375, 0.650390625, 0.253662109375, 0.58251953125, 1.8134765625, 0.8642578125, 2.591796875, 0.7314453125, 0.85986328125, 0.5615234375, 0.9296875, 0.04144287109375, 1.66015625, 1.99609375, 1.171875, 1.181640625, 1.5126953125, 0.0224456787109375, 0.58349609375, 
-1.4931640625, 0.81884765625, 0.732421875, -0.6455078125, -0.62451171875, 1.7802734375, 0.01526641845703125, -0.423095703125, 0.461669921875, 4.87890625, 1.2392578125, -0.6953125, 0.6689453125, 0.62451171875, -1.521484375, 1.7685546875, 0.810546875, 0.65478515625, 0.26123046875, 1.6396484375, 0.87548828125, 1.7353515625, 2.046875, 1.5634765625, 0.69384765625, 1.375, 0.8916015625, 1.0107421875, 0.1304931640625, 2.009765625, 0.06402587890625, -0.08428955078125, 0.04351806640625, -1.7529296875, 2.02734375, 3.521484375, 0.404541015625, 1.6337890625, -0.276611328125, 0.8837890625, -0.1287841796875, 0.91064453125, 0.8193359375, 0.701171875, 0.036529541015625, 1.26171875, 1.0478515625, -0.1422119140625, 1.0634765625, 0.61083984375, 1.3505859375, 1.208984375, 0.57275390625, 1.3623046875, 2.267578125, 0.484375, 0.9150390625, 0.56787109375, -0.70068359375, 0.27587890625, -0.70654296875, 0.8466796875, 0.57568359375, 1.6162109375, 0.87939453125, 2.248046875, -0.5458984375, 1.7744140625, 1.328125, 1.232421875, 0.6806640625, 0.9365234375, 1.052734375, -1.08984375, 1.8330078125, -0.4033203125, 1.0673828125, 0.297607421875, 1.5703125, 1.67578125, 1.34765625, 2.8203125, 2.025390625, -0.48583984375, 0.7626953125, 0.01007843017578125, 1.435546875, 0.007205963134765625, 0.05157470703125, -0.9853515625, 0.26708984375, 1.16796875, 1.2041015625, 1.99609375, -0.07916259765625, 1.244140625, -0.32080078125, 0.6748046875, 0.419921875, 1.3212890625, 1.291015625, 0.599609375, 0.0550537109375, 0.9599609375, 0.93505859375, 0.111083984375, 1.302734375, 0.0833740234375, 2.244140625, 1.25390625, 1.6015625, 0.58349609375, 1.7568359375, -0.263427734375, -0.019866943359375, -0.24658203125, -0.1871337890625, 0.927734375, 0.62255859375, 0.275146484375, 0.79541015625, 1.1796875, 1.1767578125, -0.26123046875, -0.268310546875, 1.8994140625, 1.318359375, 2.1875, 0.2469482421875, 1.41015625, 0.03973388671875, 1.2685546875, 1.1025390625, 0.9560546875, 0.865234375, -1.92578125, 1.154296875, 0.389892578125, 
1.130859375, 0.95947265625, 0.72314453125, 2.244140625, 0.048553466796875, 0.626953125, 0.42919921875, 0.82275390625, 0.311767578125, -0.320556640625, 0.01041412353515625, 0.1483154296875, 0.10809326171875, -0.3173828125, 1.1337890625, -0.8642578125, 1.4033203125, 0.048828125, 1.1787109375, 0.98779296875, 1.818359375, 1.1552734375, 0.6015625, 1.2392578125, -1.2685546875, 0.39208984375, 0.83251953125, 0.224365234375, 0.0019989013671875, 0.87548828125, 1.6572265625, 1.107421875, 0.434814453125, 1.8251953125, 0.442626953125, 1.2587890625, 0.09320068359375, -0.896484375, 1.8017578125, 1.451171875, -0.0755615234375, 0.6083984375, 2.06640625, 0.673828125, -0.33740234375, 0.192138671875, 0.21435546875, 0.80224609375, -1.490234375, 0.9501953125, 0.86083984375, -0.40283203125, 4.109375, 2.533203125, 1.2529296875, 0.8271484375, 0.225830078125, 1.0478515625, -1.9755859375, 0.841796875, 0.392822265625, 0.525390625, 0.33935546875, -0.79443359375, 0.71630859375, 0.97998046875, -0.175537109375, 0.97705078125, 1.705078125, 0.29638671875, 0.68359375, 0.54150390625, 0.435791015625, 0.99755859375, -0.369140625, 1.009765625, -0.140380859375, 0.426513671875, 0.189697265625, 1.8193359375, 1.1201171875, -0.5009765625, -0.331298828125, 0.759765625, -0.09442138671875, 0.74609375, -1.947265625, 1.3544921875, -3.935546875, 2.544921875, 1.359375, 0.1363525390625, 0.79296875, 0.79931640625, -0.3466796875, 1.1396484375, -0.33447265625, 2.0078125, -0.241455078125, 0.6318359375, 0.365234375, 0.296142578125, 0.830078125, 1.0458984375, 0.5830078125, 0.61572265625, 14.0703125, -2.0078125, -0.381591796875, 1.228515625, 0.08282470703125, -0.67822265625, -0.04339599609375, 0.397216796875, 0.1656494140625, 0.137451171875, 0.244873046875, 1.1611328125, -1.3818359375, 0.8447265625, 1.171875, 0.36328125, 0.252685546875, 0.1197509765625, 0.232177734375, -0.020172119140625, 0.64404296875, -0.01100921630859375, -1.9267578125, 0.222412109375, 0.56005859375, 1.3046875, 1.1630859375, 1.197265625, 1.02734375, 
1.6806640625, -0.043731689453125, 1.4697265625, 0.81201171875, 1.5390625, 1.240234375, -0.7353515625, 1.828125, 1.115234375, 1.931640625, -0.517578125, 0.77880859375, 1.0546875, 0.95361328125, 3.42578125, 0.0160369873046875, 0.875, 0.56005859375, 1.2421875, 1.986328125, 1.4814453125, 0.0948486328125, 1.115234375, 0.00665283203125, 2.09375, 0.3544921875, -0.52783203125, 1.2099609375, 0.45068359375, 0.65625, 0.1112060546875, 1.0751953125, -0.9521484375, -0.30029296875, 1.4462890625, 2.046875, 3.212890625, 1.68359375, 1.07421875, -0.5263671875, 0.74560546875, 1.37890625, 0.15283203125, 0.2440185546875, 0.62646484375, -0.1280517578125, 0.7646484375, -0.515625, -0.35693359375, 1.2958984375, 0.96923828125, 0.58935546875, 1.3701171875, 1.0673828125, 0.2337646484375, 0.93115234375, 0.66357421875, 6.0, 1.1025390625, -0.51708984375, -0.38330078125, 0.7197265625, 0.246826171875, -0.45166015625, 1.9521484375, 0.5546875, 0.08807373046875, 0.18505859375, 0.8857421875, -0.57177734375, 0.251708984375, 0.234375, 2.57421875, 0.9599609375, 0.5029296875, 0.10382080078125, 0.08331298828125, 0.66748046875, -0.349609375, 1.287109375, 0.259765625, 2.015625, 2.828125, -0.3095703125, -0.164306640625, -0.3408203125, 0.486572265625, 0.8466796875, 1.9130859375, 0.09088134765625, 0.66552734375, 0.00972747802734375, -0.83154296875, 1.755859375, 0.654296875, 0.173828125, 0.27587890625, -0.47607421875, -0.264404296875, 0.7529296875, 0.6533203125, 0.7275390625, 0.499755859375, 0.833984375, -0.44775390625, -0.05078125, -0.454833984375, 0.75439453125, 0.68505859375, 0.210693359375, -0.283935546875, -0.53564453125, 0.96826171875, 0.861328125, -3.33984375, -0.26171875, 0.77734375, 0.26513671875, -0.14111328125, -0.042236328125, -0.84814453125, 0.2137451171875, 0.94921875, 0.65185546875, -0.5380859375, 0.1529541015625, -0.360595703125, -0.0333251953125, -0.69189453125, 0.8974609375, 0.7109375, 0.81494140625, -0.259521484375, 1.1904296875, 0.62158203125, 1.345703125, 0.89404296875, 0.70556640625, 
1.0673828125, 1.392578125, 0.5068359375, 0.962890625, 0.736328125, 1.55078125, 0.50390625, -0.398681640625, 2.361328125, 0.345947265625, -0.61962890625, 0.330078125, 0.75439453125, -0.673828125, -0.2379150390625, 1.5673828125, 1.369140625, 0.1119384765625, -0.1834716796875, 1.4599609375, -0.77587890625, 0.5556640625, 0.09954833984375, 0.0285186767578125, 0.58935546875, -0.501953125, 0.212890625, 0.02679443359375, 0.1715087890625, 0.03466796875, -0.564453125, 2.029296875, 2.45703125, -0.72216796875, 2.138671875, 0.50830078125, -0.09356689453125, 0.230224609375, 1.6943359375, 1.5126953125, 0.39453125, 0.411376953125, 1.07421875, -0.8046875, 0.51416015625, 0.2271728515625, -0.283447265625, 0.38427734375, 0.73388671875, 0.6962890625, 1.4990234375, 0.02813720703125, 0.40478515625, 1.2451171875, 1.1162109375, -5.5703125, 0.76171875, 0.322021484375, 1.0361328125, 1.197265625, 0.1163330078125, 0.2425537109375, 1.5595703125, 1.5791015625, -0.0921630859375, 0.484619140625, 1.9052734375, 5.31640625, 1.6337890625, 0.95947265625, -0.1751708984375, 0.466552734375, 0.8330078125, 1.03125, 0.2044677734375, 0.31298828125, -1.1220703125, 0.5517578125, 0.93505859375, 0.45166015625, 1.951171875, 0.65478515625, 1.30859375, 1.0859375, 0.56494140625, 2.322265625, 0.242919921875, 1.81640625, -0.469970703125, -0.841796875, 0.90869140625, 1.5361328125, 0.923828125, 1.0595703125, 0.356689453125, -0.46142578125, 2.134765625, 1.3037109375, -0.32373046875, -9.2265625, 0.4521484375, 0.88037109375, -0.53955078125, 0.96484375, 0.7705078125, 0.84521484375, 1.580078125, -0.1448974609375, 0.7607421875, 1.0166015625, -0.086669921875, 1.611328125, 0.05938720703125, 0.5078125, 0.8427734375, 2.431640625, 0.66357421875, 3.203125, 0.132080078125, 0.461181640625, 0.779296875, 1.9482421875, 1.8720703125, 0.845703125, -1.3837890625, -0.138916015625, 0.35546875, 0.2457275390625, 0.75341796875, 1.828125, 1.4169921875, 0.60791015625, 1.0068359375, 1.109375, 0.484130859375, -0.302001953125, 0.4951171875, 
0.802734375, 1.9482421875, 0.916015625, 0.1646728515625, 2.599609375, 1.7177734375, -0.2374267578125, 0.98046875, 0.39306640625, -1.1396484375, 1.6533203125, 0.375244140625], \\\"scores\\\": [0.09906005859375, 0.12213134765625, 0.05035400390625, 0.16552734375, 0.0037384033203125, 0.010101318359375, 0.058197021484375, 0.10552978515625, 0.1141357421875, 0.115234375, 0.00444793701171875, 0.00812530517578125, 0.0343017578125, 0.009002685546875, 0.087158203125, 0.00818634033203125, 0.003238677978515625, 0.024169921875, 0.00702667236328125, 0.00936126708984375, 0.00632476806640625, 0.0293731689453125, 0.01800537109375, 0.0088348388671875, 0.013397216796875, 0.02557373046875, 0.12109375, 0.004413604736328125, 0.016571044921875, 0.00274658203125, 0.2142333984375, 0.29638671875, 0.16015625, 0.007843017578125, 0.007160186767578125, 0.00508880615234375, 0.054229736328125, 0.048736572265625, 0.026397705078125, 0.0265350341796875, 0.051361083984375, 0.018310546875, 0.00974273681640625, 0.0218963623046875, 0.216064453125, 0.021728515625, 0.0173797607421875, 0.004520416259765625], \\\"stoks\\\": [52, 52, 52, 52, 52, 41, 41, 374, 303, 303, 303, 427], \\\"time\\\": {\\\"begin_ms\\\": 640, \\\"end_ms\\\": 1140}}]}\"\n },\n {\n \"id\": \"7645a0d1-2e64-410d-83a8-b96040432e9a\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244957031,\n \"role\": \"AGENT\",\n \"type\": \"AGENT_MESSAGE\",\n \"message_text\": \"Hello!\",\n \"emotion_features\": \"{\\\"Admiration\\\": 0.044921875, \\\"Adoration\\\": 0.0253753662109375, \\\"Aesthetic Appreciation\\\": 0.03265380859375, \\\"Amusement\\\": 0.118408203125, \\\"Anger\\\": 0.06719970703125, \\\"Anxiety\\\": 0.0411376953125, \\\"Awe\\\": 0.03802490234375, \\\"Awkwardness\\\": 0.056549072265625, \\\"Boredom\\\": 0.04217529296875, \\\"Calmness\\\": 0.08709716796875, \\\"Concentration\\\": 0.070556640625, \\\"Confusion\\\": 0.06964111328125, \\\"Contemplation\\\": 0.0343017578125, \\\"Contempt\\\": 0.037689208984375, 
\\\"Contentment\\\": 0.059417724609375, \\\"Craving\\\": 0.01132965087890625, \\\"Desire\\\": 0.01406097412109375, \\\"Determination\\\": 0.1143798828125, \\\"Disappointment\\\": 0.051177978515625, \\\"Disgust\\\": 0.028594970703125, \\\"Distress\\\": 0.054901123046875, \\\"Doubt\\\": 0.04638671875, \\\"Ecstasy\\\": 0.0258026123046875, \\\"Embarrassment\\\": 0.0222015380859375, \\\"Empathic Pain\\\": 0.015777587890625, \\\"Entrancement\\\": 0.0160980224609375, \\\"Envy\\\": 0.0163421630859375, \\\"Excitement\\\": 0.129638671875, \\\"Fear\\\": 0.03125, \\\"Guilt\\\": 0.01483917236328125, \\\"Horror\\\": 0.0194549560546875, \\\"Interest\\\": 0.1341552734375, \\\"Joy\\\": 0.0738525390625, \\\"Love\\\": 0.0216522216796875, \\\"Nostalgia\\\": 0.0210418701171875, \\\"Pain\\\": 0.020721435546875, \\\"Pride\\\": 0.05499267578125, \\\"Realization\\\": 0.0728759765625, \\\"Relief\\\": 0.04052734375, \\\"Romance\\\": 0.0129241943359375, \\\"Sadness\\\": 0.0254669189453125, \\\"Satisfaction\\\": 0.07159423828125, \\\"Shame\\\": 0.01495361328125, \\\"Surprise (negative)\\\": 0.05560302734375, \\\"Surprise (positive)\\\": 0.07965087890625, \\\"Sympathy\\\": 0.022247314453125, \\\"Tiredness\\\": 0.0194549560546875, \\\"Triumph\\\": 0.04107666015625}\",\n \"metadata\": \"\"\n }\n ],\n \"page_number\": 0,\n \"page_size\": 3,\n \"total_pages\": 1,\n \"end_timestamp\": 1716244958546,\n \"metadata\": \"\",\n \"config\": {\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 0\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"333ce95c-76c7-4621-aa72-bb0ed90fcf50","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"1c210e60-fc3e-4020-ba31-155c211461a5","name":"Get chat audio - default","request":{"urlPathTemplate":"/v0/evi/chats/{id}/audio","method":"GET","pathParameters":{"id":{"equalTo":"470a49f6-1dec-4afe-8b61-035d3b2d63b0"}}},"response":{"status":200,"body":"{\n \"id\": 
\"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"user_id\": \"e6235940-cfda-3988-9147-ff531627cf42\",\n \"status\": \"COMPLETE\",\n \"filename\": \"e6235940-cfda-3988-9147-ff531627cf42/470a49f6-1dec-4afe-8b61-035d3b2d63b0/reconstructed_audio.mp4\",\n \"modified_at\": 1729875432555,\n \"signed_audio_url\": \"https://storage.googleapis.com/...etc.\",\n \"signed_url_expiration_timestamp_millis\": 1730232816964\n}","headers":{"Content-Type":"application/json"}},"uuid":"1c210e60-fc3e-4020-ba31-155c211461a5","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"95990ecd-a7f2-495c-84ec-e0b2f0f1e471","name":"List configs - default","request":{"urlPathTemplate":"/v0/evi/configs","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 1,\n \"total_pages\": 1,\n \"configs_page\": [\n {\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 0,\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": false\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"95990ecd-a7f2-495c-84ec-e0b2f0f1e471","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"73a53d69-e4fb-44c0-a4cc-3ebdee8e8c36","name":"Create config - default","request":{"urlPathTemplate":"/v0/evi/configs","method":"POST"},"response":{"status":201,"body":"{\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 0,\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1715275452390,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. 
Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": false\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"73a53d69-e4fb-44c0-a4cc-3ebdee8e8c36","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"9fa7c906-3213-4358-9fd3-fb98a80ccff9","name":"List config versions - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}","method":"GET","pathParameters":{"id":{"equalTo":"1b60e1a0-cc59-424a-8d2c-189d354db3f3"}}},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 10,\n \"total_pages\": 1,\n \"configs_page\": [\n {\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 0,\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1715275452390,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are 
an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": false\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"9fa7c906-3213-4358-9fd3-fb98a80ccff9","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"66129a1a-71df-4899-a702-a2582339ad92","name":"Create config version - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}","method":"POST","pathParameters":{"id":{"equalTo":"1b60e1a0-cc59-424a-8d2c-189d354db3f3"}}},"response":{"status":201,"body":"{\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 1,\n \"version_description\": \"This is an updated version of the Weather Assistant Config.\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1722642242998,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": 
\"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": true\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"66129a1a-71df-4899-a702-a2582339ad92","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"96ab709e-8ce5-42ad-8930-11174405a07f","name":"Delete config - 
default","request":{"urlPathTemplate":"/v0/evi/configs/{id}","method":"DELETE","pathParameters":{"id":{"equalTo":"1b60e1a0-cc59-424a-8d2c-189d354db3f3"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"96ab709e-8ce5-42ad-8930-11174405a07f","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"ac036d7f-9200-454a-ad8b-058cedce8018","name":"Update config name - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}","method":"PATCH","pathParameters":{"id":{"equalTo":"1b60e1a0-cc59-424a-8d2c-189d354db3f3"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"ac036d7f-9200-454a-ad8b-058cedce8018","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"336e4516-6577-4544-8e15-559222ec726d","name":"Get config version - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}/version/{version}","method":"GET","pathParameters":{"id":{"equalTo":"1b60e1a0-cc59-424a-8d2c-189d354db3f3"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"{\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 1,\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1715275452390,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": false\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"336e4516-6577-4544-8e15-559222ec726d","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"3631f032-af11-4788-9ada-63c97ec90d03","name":"Delete config version - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}/version/{version}","method":"DELETE","pathParameters":{"id":{"equalTo":"1b60e1a0-cc59-424a-8d2c-189d354db3f3"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"3631f032-af11-4788-9ada-63c97ec90d03","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"4a8a419e-4772-4077-95b5-281c2bd2851b","name":"Update config description - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}/version/{version}","method":"PATCH","pathParameters":{"id":{"equalTo":"1b60e1a0-cc59-424a-8d2c-189d354db3f3"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"{\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 1,\n \"version_description\": \"This is an 
updated version_description.\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1715275452390,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": false\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"4a8a419e-4772-4077-95b5-281c2bd2851b","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"3760f9c4-fbaa-40e9-9770-35af4592adb1","name":"List prompts - default","request":{"urlPathTemplate":"/v0/evi/prompts","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 2,\n \"total_pages\": 
1,\n \"prompts_page\": [\n {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n },\n {\n \"id\": \"616b2b4c-a096-4445-9c23-64058b564fc2\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Web Search Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI web search assistant designed to help users find accurate and relevant information on the web. Respond to user queries promptly, using the built-in web search tool to retrieve up-to-date results. Present information clearly and concisely, summarizing key points where necessary. Use simple language and avoid technical jargon. 
If needed, provide helpful tips for refining search queries to obtain better results.\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"3760f9c4-fbaa-40e9-9770-35af4592adb1","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"1fab19ce-cf01-4f16-b221-518ebe235e05","name":"Create prompt - default","request":{"urlPathTemplate":"/v0/evi/prompts","method":"POST"},"response":{"status":201,"body":"{\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": null,\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1722633247488,\n \"modified_on\": 1722633247488,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"1fab19ce-cf01-4f16-b221-518ebe235e05","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"818fa71c-38b4-45da-92f3-167567201251","name":"List prompt versions - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}","method":"GET","pathParameters":{"id":{"equalTo":"af699d45-2985-42cc-91b9-af9e5da3bac5"}}},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 10,\n \"total_pages\": 1,\n \"prompts_page\": [\n {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1722633247488,\n \"modified_on\": 1722633247488,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"818fa71c-38b4-45da-92f3-167567201251","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"b2978df3-51e4-4d99-b160-6287c20dda6f","name":"Create prompt version - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}","method":"POST","pathParameters":{"id":{"equalTo":"af699d45-2985-42cc-91b9-af9e5da3bac5"}}},"response":{"status":201,"body":"{\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"This is an updated version of the Weather Assistant Prompt.\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1722633247488,\n \"modified_on\": 1722635140150,\n \"text\": \"You are an updated version of an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"b2978df3-51e4-4d99-b160-6287c20dda6f","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"722d1802-e538-4d83-9720-79ff982e0011","name":"Delete prompt - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}","method":"DELETE","pathParameters":{"id":{"equalTo":"af699d45-2985-42cc-91b9-af9e5da3bac5"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"722d1802-e538-4d83-9720-79ff982e0011","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"6e0c3636-6d00-4e26-92d7-40460ef14c6c","name":"Update prompt name - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}","method":"PATCH","pathParameters":{"id":{"equalTo":"af699d45-2985-42cc-91b9-af9e5da3bac5"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"6e0c3636-6d00-4e26-92d7-40460ef14c6c","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"3812aa90-4c73-4d5b-a38b-d5ca8efde008","name":"Get prompt version - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}/version/{version}","method":"GET","pathParameters":{"id":{"equalTo":"af699d45-2985-42cc-91b9-af9e5da3bac5"},"version":{"equalTo":"0"}}},"response":{"status":200,"body":"{\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1722633247488,\n \"modified_on\": 1722633247488,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. 
Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"3812aa90-4c73-4d5b-a38b-d5ca8efde008","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"27f84b39-bfde-4b0f-a49e-fbd93767a180","name":"Delete prompt version - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}/version/{version}","method":"DELETE","pathParameters":{"id":{"equalTo":"af699d45-2985-42cc-91b9-af9e5da3bac5"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"27f84b39-bfde-4b0f-a49e-fbd93767a180","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"760d3795-9a2c-4a38-940a-6b7459dff285","name":"Update prompt description - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}/version/{version}","method":"PATCH","pathParameters":{"id":{"equalTo":"af699d45-2985-42cc-91b9-af9e5da3bac5"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"{\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"This is an updated version_description.\",\n \"name\": \"string\",\n \"created_on\": 1722633247488,\n \"modified_on\": 1722634770585,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"760d3795-9a2c-4a38-940a-6b7459dff285","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"76070823-253f-4b18-9c68-a80f6d2373ee","name":"List tools - default","request":{"urlPathTemplate":"/v0/evi/tools","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 2,\n \"total_pages\": 1,\n \"tools_page\": [\n {\n \"tool_type\": \"FUNCTION\",\n \"id\": \"d20827af-5d8d-4f66-b6b9-ce2e3e1ea2b2\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches user's current location.\",\n \"name\": \"get_current_location\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"fallback_content\": \"Unable to fetch location.\",\n \"description\": \"Fetches user's current location.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }}, \\\"required\\\": [\\\"location\\\"] }\"\n },\n {\n \"tool_type\": \"FUNCTION\",\n \"id\": \"4442f3ea-9038-40e3-a2ce-1522b7de770f\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current weather and uses celsius or fahrenheit based on location of user.\",\n \"name\": \"get_current_weather\",\n \"created_on\": 1715266126705,\n \"modified_on\": 1715266126705,\n \"fallback_content\": \"Unable to fetch location.\",\n \"description\": \"Fetches current weather and uses celsius or fahrenheit based on location of user.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. 
San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\"], \\\"description\\\": \\\"The temperature unit to use. Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"76070823-253f-4b18-9c68-a80f6d2373ee","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"7dc71f1b-3d70-4527-a21f-326f7db77dec","name":"Create tool - default","request":{"urlPathTemplate":"/v0/evi/tools","method":"POST"},"response":{"status":201,"body":"{\n \"tool_type\": \"FUNCTION\",\n \"id\": \"aa9b71c4-723c-47ff-9f83-1a1829e74376\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current weather and uses celsius or fahrenheit based on location of user.\",\n \"name\": \"get_current_weather\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1715275452390,\n \"fallback_content\": \"Unable to fetch current weather.\",\n \"description\": \"This tool is for getting the current weather.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\"], \\\"description\\\": \\\"The temperature unit to use. 
Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"7dc71f1b-3d70-4527-a21f-326f7db77dec","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"97f54c38-2fe1-4344-9355-88765ad39e92","name":"List tool versions - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}","method":"GET","pathParameters":{"id":{"equalTo":"00183a3f-79ba-413d-9f3b-609864268bea"}}},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 10,\n \"total_pages\": 1,\n \"tools_page\": [\n {\n \"tool_type\": \"FUNCTION\",\n \"id\": \"00183a3f-79ba-413d-9f3b-609864268bea\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current weather and uses celsius, fahrenheit, or kelvin based on location of user.\",\n \"name\": \"get_current_weather\",\n \"created_on\": 1715277014228,\n \"modified_on\": 1715277602313,\n \"fallback_content\": \"Unable to fetch current weather.\",\n \"description\": \"This tool is for getting the current weather.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\", \\\"kelvin\\\"], \\\"description\\\": \\\"The temperature unit to use. 
Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"97f54c38-2fe1-4344-9355-88765ad39e92","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"1230a7c7-82ee-4940-98d3-3f0e1acc2cba","name":"Create tool version - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}","method":"POST","pathParameters":{"id":{"equalTo":"00183a3f-79ba-413d-9f3b-609864268bea"}}},"response":{"status":201,"body":"{\n \"tool_type\": \"FUNCTION\",\n \"id\": \"00183a3f-79ba-413d-9f3b-609864268bea\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current weather and uses celsius, fahrenheit, or kelvin based on location of user.\",\n \"name\": \"get_current_weather\",\n \"created_on\": 1715277014228,\n \"modified_on\": 1715277602313,\n \"fallback_content\": \"Unable to fetch current weather.\",\n \"description\": \"This tool is for getting the current weather.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\", \\\"kelvin\\\"], \\\"description\\\": \\\"The temperature unit to use. 
Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"1230a7c7-82ee-4940-98d3-3f0e1acc2cba","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"6e58cf98-dbd8-465b-a6cc-53941c38f006","name":"Delete tool - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}","method":"DELETE","pathParameters":{"id":{"equalTo":"00183a3f-79ba-413d-9f3b-609864268bea"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"6e58cf98-dbd8-465b-a6cc-53941c38f006","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"7bb920f0-9b75-4374-bdfb-540a599f3fce","name":"Update tool name - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}","method":"PATCH","pathParameters":{"id":{"equalTo":"00183a3f-79ba-413d-9f3b-609864268bea"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"7bb920f0-9b75-4374-bdfb-540a599f3fce","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"f4fe78ec-6b74-402d-8aef-0cbd04c6a473","name":"Get tool version - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}/version/{version}","method":"GET","pathParameters":{"id":{"equalTo":"00183a3f-79ba-413d-9f3b-609864268bea"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"{\n \"tool_type\": \"FUNCTION\",\n \"id\": \"00183a3f-79ba-413d-9f3b-609864268bea\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current weather and uses celsius, fahrenheit, or kelvin based on location of user.\",\n \"name\": \"string\",\n \"created_on\": 1715277014228,\n \"modified_on\": 1715277602313,\n \"fallback_content\": \"Unable to fetch current weather.\",\n \"description\": \"This tool is 
for getting the current weather.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\", \\\"kelvin\\\"], \\\"description\\\": \\\"The temperature unit to use. Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"f4fe78ec-6b74-402d-8aef-0cbd04c6a473","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"74ef73ce-49e2-492e-a4cb-ea6a1dc9b948","name":"Delete tool version - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}/version/{version}","method":"DELETE","pathParameters":{"id":{"equalTo":"00183a3f-79ba-413d-9f3b-609864268bea"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"74ef73ce-49e2-492e-a4cb-ea6a1dc9b948","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"82d8c062-5280-4378-afad-8057ebea037a","name":"Update tool description - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}/version/{version}","method":"PATCH","pathParameters":{"id":{"equalTo":"00183a3f-79ba-413d-9f3b-609864268bea"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"{\n \"tool_type\": \"FUNCTION\",\n \"id\": \"00183a3f-79ba-413d-9f3b-609864268bea\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current temperature, precipitation, wind speed, AQI, and other weather conditions. 
Uses Celsius, Fahrenheit, or kelvin depending on user's region.\",\n \"name\": \"string\",\n \"created_on\": 1715277014228,\n \"modified_on\": 1715277602313,\n \"fallback_content\": \"Unable to fetch current weather.\",\n \"description\": \"This tool is for getting the current weather.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\", \\\"kelvin\\\"], \\\"description\\\": \\\"The temperature unit to use. Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"82d8c062-5280-4378-afad-8057ebea037a","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"ac2d855e-1240-4aa0-88e2-61efb0c0026a","name":"List jobs - default","request":{"urlPathTemplate":"/v0/batch/jobs","method":"GET"},"response":{"status":200,"body":"[\n {\n \"job_id\": \"job_id\",\n \"request\": {\n \"callback_url\": null,\n \"files\": [\n {\n \"filename\": \"filename\",\n \"md5sum\": \"md5sum\",\n \"content_type\": \"content_type\"\n }\n ],\n \"models\": {\n \"burst\": {},\n \"face\": {\n \"descriptions\": null,\n \"facs\": null,\n \"fps_pred\": 3,\n \"identify_faces\": false,\n \"min_face_size\": 60,\n \"prob_threshold\": 0.99,\n \"save_faces\": false\n },\n \"facemesh\": {},\n \"language\": {\n \"granularity\": \"word\",\n \"identify_speakers\": false,\n \"sentiment\": null,\n \"toxicity\": null\n },\n \"ner\": {\n \"identify_speakers\": false\n },\n \"prosody\": {\n \"granularity\": \"utterance\",\n \"identify_speakers\": false,\n \"window\": null\n }\n },\n \"notify\": true,\n \"text\": [],\n \"urls\": [\n \"https://hume-tutorials.s3.amazonaws.com/faces.zip\"\n ]\n },\n \"state\": 
{\n \"created_timestamp_ms\": 1712587158717,\n \"ended_timestamp_ms\": 1712587159274,\n \"num_errors\": 0,\n \"num_predictions\": 10,\n \"started_timestamp_ms\": 1712587158800,\n \"status\": \"COMPLETED\"\n },\n \"type\": \"INFERENCE\"\n }\n]","headers":{"Content-Type":"application/json"}},"uuid":"ac2d855e-1240-4aa0-88e2-61efb0c0026a","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"53d86d3c-c150-433f-ab12-ef8e8e9210eb","name":"Start inference job - default","request":{"urlPathTemplate":"/v0/batch/jobs","method":"POST"},"response":{"status":200,"body":"{\n \"job_id\": \"job_id\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"53d86d3c-c150-433f-ab12-ef8e8e9210eb","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"b5838b5a-8fe8-4d6b-9439-5e77b6a8b017","name":"Get job details - default","request":{"urlPathTemplate":"/v0/batch/jobs/{id}","method":"GET","pathParameters":{"id":{"equalTo":"job_id"}}},"response":{"status":200,"body":"{\n \"type\": \"INFERENCE\",\n \"job_id\": \"job_id\",\n \"request\": {\n \"callback_url\": null,\n \"files\": [],\n \"models\": {\n \"burst\": {},\n \"face\": {\n \"descriptions\": null,\n \"facs\": null,\n \"fps_pred\": 3,\n \"identify_faces\": false,\n \"min_face_size\": 60,\n \"prob_threshold\": 0.99,\n \"save_faces\": false\n },\n \"facemesh\": {},\n \"language\": {\n \"granularity\": \"word\",\n \"identify_speakers\": false,\n \"sentiment\": null,\n \"toxicity\": null\n },\n \"ner\": {\n \"identify_speakers\": false\n },\n \"prosody\": {\n \"granularity\": \"utterance\",\n \"identify_speakers\": false,\n \"window\": null\n }\n },\n \"notify\": true,\n \"text\": [],\n \"urls\": [\n \"https://hume-tutorials.s3.amazonaws.com/faces.zip\"\n ]\n },\n \"state\": {\n \"created_timestamp_ms\": 1712590457884,\n \"ended_timestamp_ms\": 1712590462252,\n \"num_errors\": 
0,\n \"num_predictions\": 10,\n \"started_timestamp_ms\": 1712590457995,\n \"status\": \"COMPLETED\"\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"b5838b5a-8fe8-4d6b-9439-5e77b6a8b017","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"27496704-1278-4ec0-804b-5b06a9ddad44","name":"Get job predictions - default","request":{"urlPathTemplate":"/v0/batch/jobs/{id}/predictions","method":"GET","pathParameters":{"id":{"equalTo":"job_id"}}},"response":{"status":200,"body":"[\n {\n \"source\": {\n \"type\": \"url\",\n \"url\": \"https://hume-tutorials.s3.amazonaws.com/faces.zip\"\n },\n \"results\": {\n \"predictions\": [\n {\n \"file\": \"faces/100.jpg\",\n \"models\": {\n \"face\": {\n \"metadata\": null,\n \"grouped_predictions\": [\n {\n \"id\": \"unknown\",\n \"predictions\": [\n {\n \"frame\": 0,\n \"time\": 0,\n \"prob\": 0.9994111061096191,\n \"box\": {\n \"x\": 1187.885986328125,\n \"y\": 1397.697509765625,\n \"w\": 1401.668701171875,\n \"h\": 1961.424560546875\n },\n \"emotions\": [\n {\n \"name\": \"Admiration\",\n \"score\": 0.10722749680280685\n },\n {\n \"name\": \"Adoration\",\n \"score\": 0.06395940482616425\n },\n {\n \"name\": \"Aesthetic Appreciation\",\n \"score\": 0.05811462551355362\n },\n {\n \"name\": \"Amusement\",\n \"score\": 0.14187128841876984\n },\n {\n \"name\": \"Anger\",\n \"score\": 0.02804684266448021\n },\n {\n \"name\": \"Anxiety\",\n \"score\": 0.2713485360145569\n },\n {\n \"name\": \"Awe\",\n \"score\": 0.33812594413757324\n },\n {\n \"name\": \"Awkwardness\",\n \"score\": 0.1745193600654602\n },\n {\n \"name\": \"Boredom\",\n \"score\": 0.23600080609321594\n },\n {\n \"name\": \"Calmness\",\n \"score\": 0.18988418579101562\n },\n {\n \"name\": \"Concentration\",\n \"score\": 0.44288986921310425\n },\n {\n \"name\": \"Confusion\",\n \"score\": 0.39346569776535034\n },\n {\n \"name\": \"Contemplation\",\n \"score\": 0.31002455949783325\n },\n {\n 
\"name\": \"Contempt\",\n \"score\": 0.048870109021663666\n },\n {\n \"name\": \"Contentment\",\n \"score\": 0.0579497292637825\n },\n {\n \"name\": \"Craving\",\n \"score\": 0.06544201076030731\n },\n {\n \"name\": \"Desire\",\n \"score\": 0.05526508390903473\n },\n {\n \"name\": \"Determination\",\n \"score\": 0.08590991795063019\n },\n {\n \"name\": \"Disappointment\",\n \"score\": 0.19508258998394012\n },\n {\n \"name\": \"Disgust\",\n \"score\": 0.031529419124126434\n },\n {\n \"name\": \"Distress\",\n \"score\": 0.23210826516151428\n },\n {\n \"name\": \"Doubt\",\n \"score\": 0.3284550905227661\n },\n {\n \"name\": \"Ecstasy\",\n \"score\": 0.040716782212257385\n },\n {\n \"name\": \"Embarrassment\",\n \"score\": 0.1467227339744568\n },\n {\n \"name\": \"Empathic Pain\",\n \"score\": 0.07633581757545471\n },\n {\n \"name\": \"Entrancement\",\n \"score\": 0.16245244443416595\n },\n {\n \"name\": \"Envy\",\n \"score\": 0.03267110139131546\n },\n {\n \"name\": \"Excitement\",\n \"score\": 0.10656816512346268\n },\n {\n \"name\": \"Fear\",\n \"score\": 0.3115977346897125\n },\n {\n \"name\": \"Guilt\",\n \"score\": 0.11615975946187973\n },\n {\n \"name\": \"Horror\",\n \"score\": 0.19795553386211395\n },\n {\n \"name\": \"Interest\",\n \"score\": 0.3136432468891144\n },\n {\n \"name\": \"Joy\",\n \"score\": 0.06285581737756729\n },\n {\n \"name\": \"Love\",\n \"score\": 0.06339752674102783\n },\n {\n \"name\": \"Nostalgia\",\n \"score\": 0.05866732448339462\n },\n {\n \"name\": \"Pain\",\n \"score\": 0.07684041559696198\n },\n {\n \"name\": \"Pride\",\n \"score\": 0.026822954416275024\n },\n {\n \"name\": \"Realization\",\n \"score\": 0.30000734329223633\n },\n {\n \"name\": \"Relief\",\n \"score\": 0.04414166510105133\n },\n {\n \"name\": \"Romance\",\n \"score\": 0.042728863656520844\n },\n {\n \"name\": \"Sadness\",\n \"score\": 0.14773206412792206\n },\n {\n \"name\": \"Satisfaction\",\n \"score\": 0.05902980640530586\n },\n {\n \"name\": \"Shame\",\n 
\"score\": 0.08103451132774353\n },\n {\n \"name\": \"Surprise (negative)\",\n \"score\": 0.25518184900283813\n },\n {\n \"name\": \"Surprise (positive)\",\n \"score\": 0.28845661878585815\n },\n {\n \"name\": \"Sympathy\",\n \"score\": 0.062488824129104614\n },\n {\n \"name\": \"Tiredness\",\n \"score\": 0.1559651643037796\n },\n {\n \"name\": \"Triumph\",\n \"score\": 0.01955239288508892\n }\n ],\n \"facs\": null,\n \"descriptions\": null\n }\n ]\n }\n ]\n }\n }\n }\n ],\n \"errors\": []\n }\n }\n]","headers":{"Content-Type":"application/json"}},"uuid":"27496704-1278-4ec0-804b-5b06a9ddad44","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"12d3146e-fa86-4658-bfca-755d6e939757","name":"Start inference job from local file - default","request":{"urlPathTemplate":"/v0/batch/jobs","method":"POST"},"response":{"status":200,"body":"{\n \"job_id\": \"job_id\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"12d3146e-fa86-4658-bfca-755d6e939757","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}}],"meta":{"total":48}}
\ No newline at end of file
+{"mappings":[{"id":"e0fc3f32-35c4-4c49-89f2-eaa4e21b9444","name":"Send Message - default","request":{"urlPathTemplate":"/v0/evi/chat/{chat_id}/send","method":"POST","pathParameters":{"chat_id":{"equalTo":"chat_id"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"e0fc3f32-35c4-4c49-89f2-eaa4e21b9444","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"7e5b16a7-b7b9-4e7d-b0a9-61b1ac4b5f7d","name":"List chat_groups - default","request":{"urlPathTemplate":"/v0/evi/chat_groups","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 1,\n \"total_pages\": 1,\n \"pagination_direction\": \"ASC\",\n \"chat_groups_page\": [\n {\n \"id\": \"697056f0-6c7e-487d-9bd8-9c19df79f05f\",\n \"first_start_timestamp\": 1721844196397,\n \"most_recent_start_timestamp\": 1721861821717,\n \"active\": false,\n \"most_recent_chat_id\": \"dfdbdd4d-0ddf-418b-8fc4-80a266579d36\",\n \"num_chats\": 5\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"7e5b16a7-b7b9-4e7d-b0a9-61b1ac4b5f7d","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"52c3c012-9681-44fd-b0dd-d644aac44f8c","name":"Get chat_group - default","request":{"urlPathTemplate":"/v0/evi/chat_groups/{id}","method":"GET","pathParameters":{"id":{"equalTo":"your-chat-group-id"}}},"response":{"status":200,"body":"{\n \"id\": \"369846cf-6ad5-404d-905e-a8acb5cdfc78\",\n \"first_start_timestamp\": 1712334213647,\n \"most_recent_start_timestamp\": 1712334213647,\n \"num_chats\": 1,\n \"page_number\": 0,\n \"page_size\": 1,\n \"total_pages\": 1,\n \"pagination_direction\": \"ASC\",\n \"chats_page\": [\n {\n \"id\": \"6375d4f8-cd3e-4d6b-b13b-ace66b7c8aaa\",\n \"chat_group_id\": \"369846cf-6ad5-404d-905e-a8acb5cdfc78\",\n \"status\": \"USER_ENDED\",\n \"start_timestamp\": 1712334213647,\n 
\"end_timestamp\": 1712334332571,\n \"event_count\": 0,\n \"metadata\": null,\n \"config\": null\n }\n ],\n \"active\": false\n}","headers":{"Content-Type":"application/json"}},"uuid":"52c3c012-9681-44fd-b0dd-d644aac44f8c","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"ef5bd433-148e-481b-b653-7b4676a3fbbb","name":"Get chat group audio - default","request":{"urlPathTemplate":"/v0/evi/chat_groups/{id}/audio","method":"GET","pathParameters":{"id":{"equalTo":"your-chat-group-id"}}},"response":{"status":200,"body":"{\n \"id\": \"369846cf-6ad5-404d-905e-a8acb5cdfc78\",\n \"user_id\": \"e6235940-cfda-3988-9147-ff531627cf42\",\n \"num_chats\": 1,\n \"page_number\": 0,\n \"page_size\": 10,\n \"total_pages\": 1,\n \"pagination_direction\": \"ASC\",\n \"audio_reconstructions_page\": [\n {\n \"id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"user_id\": \"e6235940-cfda-3988-9147-ff531627cf42\",\n \"status\": \"COMPLETE\",\n \"filename\": \"e6235940-cfda-3988-9147-ff531627cf42/470a49f6-1dec-4afe-8b61-035d3b2d63b0/reconstructed_audio.mp4\",\n \"modified_at\": 1729875432555,\n \"signed_audio_url\": \"https://storage.googleapis.com/...etc.\",\n \"signed_url_expiration_timestamp_millis\": 1730232816964\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"ef5bd433-148e-481b-b653-7b4676a3fbbb","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"63f657bf-ebac-4bc4-bbae-5a46d2babec4","name":"List chat events from a specific chat_group - default","request":{"urlPathTemplate":"/v0/evi/chat_groups/{id}/events","method":"GET","pathParameters":{"id":{"equalTo":"your-chat-group-id"}}},"response":{"status":200,"body":"{\n \"id\": \"697056f0-6c7e-487d-9bd8-9c19df79f05f\",\n \"page_number\": 0,\n \"page_size\": 3,\n \"total_pages\": 1,\n \"pagination_direction\": \"ASC\",\n \"events_page\": [\n {\n \"id\": 
\"5d44bdbb-49a3-40fb-871d-32bf7e76efe7\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244940762,\n \"role\": \"SYSTEM\",\n \"type\": \"SYSTEM_PROMPT\",\n \"message_text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\",\n \"emotion_features\": \"\",\n \"metadata\": \"\"\n },\n {\n \"id\": \"5976ddf6-d093-4bb9-ba60-8f6c25832dde\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244956278,\n \"role\": \"USER\",\n \"type\": \"USER_MESSAGE\",\n \"message_text\": \"Hello.\",\n \"emotion_features\": \"{\\\"Admiration\\\": 0.09906005859375, \\\"Adoration\\\": 0.12213134765625, \\\"Aesthetic Appreciation\\\": 0.05035400390625, \\\"Amusement\\\": 0.16552734375, \\\"Anger\\\": 0.0037384033203125, \\\"Anxiety\\\": 0.010101318359375, \\\"Awe\\\": 0.058197021484375, \\\"Awkwardness\\\": 0.10552978515625, \\\"Boredom\\\": 0.1141357421875, \\\"Calmness\\\": 0.115234375, \\\"Concentration\\\": 0.00444793701171875, \\\"Confusion\\\": 0.0343017578125, \\\"Contemplation\\\": 0.00812530517578125, \\\"Contempt\\\": 0.009002685546875, \\\"Contentment\\\": 0.087158203125, \\\"Craving\\\": 0.00818634033203125, \\\"Desire\\\": 0.018310546875, \\\"Determination\\\": 0.003238677978515625, \\\"Disappointment\\\": 0.024169921875, \\\"Disgust\\\": 0.00702667236328125, \\\"Distress\\\": 0.00936126708984375, \\\"Doubt\\\": 0.00632476806640625, \\\"Ecstasy\\\": 0.0293731689453125, \\\"Embarrassment\\\": 0.01800537109375, \\\"Empathic Pain\\\": 0.0088348388671875, \\\"Entrancement\\\": 0.013397216796875, \\\"Envy\\\": 0.02557373046875, \\\"Excitement\\\": 0.12109375, \\\"Fear\\\": 0.004413604736328125, \\\"Guilt\\\": 0.016571044921875, \\\"Horror\\\": 
0.00274658203125, \\\"Interest\\\": 0.2142333984375, \\\"Joy\\\": 0.29638671875, \\\"Love\\\": 0.16015625, \\\"Nostalgia\\\": 0.007843017578125, \\\"Pain\\\": 0.007160186767578125, \\\"Pride\\\": 0.00508880615234375, \\\"Realization\\\": 0.054229736328125, \\\"Relief\\\": 0.048736572265625, \\\"Romance\\\": 0.026397705078125, \\\"Sadness\\\": 0.0265350341796875, \\\"Satisfaction\\\": 0.051361083984375, \\\"Shame\\\": 0.00974273681640625, \\\"Surprise (negative)\\\": 0.0218963623046875, \\\"Surprise (positive)\\\": 0.216064453125, \\\"Sympathy\\\": 0.021728515625, \\\"Tiredness\\\": 0.0173797607421875, \\\"Triumph\\\": 0.004520416259765625}\",\n \"metadata\": \"{\\\"segments\\\": [{\\\"content\\\": \\\"Hello.\\\", \\\"embedding\\\": [0.6181640625, 0.1763916015625, -30.921875, 1.2705078125, 0.927734375, 0.63720703125, 2.865234375, 0.1080322265625, 0.2978515625, 1.0107421875, 1.34375, 0.74560546875, 0.416259765625, 0.99462890625, -0.333740234375, 0.361083984375, -1.388671875, 1.0107421875, 1.3173828125, 0.55615234375, 0.541015625, -0.1837158203125, 1.697265625, 0.228515625, 2.087890625, -0.311767578125, 0.053680419921875, 1.3349609375, 0.95068359375, 0.00441741943359375, 0.705078125, 1.8916015625, -0.939453125, 0.93701171875, -0.28955078125, 1.513671875, 0.5595703125, 1.0126953125, -0.1624755859375, 1.4072265625, -0.28857421875, -0.4560546875, -0.1500244140625, -0.1102294921875, -0.222412109375, 0.8779296875, 1.275390625, 1.6689453125, 0.80712890625, -0.34814453125, -0.325439453125, 0.412841796875, 0.81689453125, 0.55126953125, 1.671875, 0.6611328125, 0.7451171875, 1.50390625, 1.0224609375, -1.671875, 0.7373046875, 2.1328125, 2.166015625, 0.41015625, -0.127685546875, 1.9345703125, -4.2734375, 0.332275390625, 0.26171875, 0.76708984375, 0.2685546875, 0.468017578125, 1.208984375, -1.517578125, 1.083984375, 0.84814453125, 1.0244140625, -0.0072174072265625, 1.34375, 1.0712890625, 1.517578125, -0.52001953125, 0.59228515625, 0.8154296875, -0.951171875, -0.07757568359375, 
1.3330078125, 1.125, 0.61181640625, 1.494140625, 0.357421875, 1.1796875, 1.482421875, 0.8046875, 0.1536865234375, 1.8076171875, 0.68115234375, -15.171875, 1.2294921875, 0.319091796875, 0.499755859375, 1.5771484375, 0.94677734375, -0.2490234375, 0.88525390625, 3.47265625, 0.75927734375, 0.71044921875, 1.2333984375, 1.4169921875, -0.56640625, -1.8095703125, 1.37109375, 0.428955078125, 1.89453125, -0.39013671875, 0.1734619140625, 1.5595703125, -1.2294921875, 2.552734375, 0.58349609375, 0.2156982421875, -0.00984954833984375, -0.6865234375, -0.0272979736328125, -0.2264404296875, 2.853515625, 1.3896484375, 0.52978515625, 0.783203125, 3.0390625, 0.75537109375, 0.219970703125, 0.384521484375, 0.385986328125, 2.0546875, -0.10443115234375, 1.5146484375, 1.4296875, 1.9716796875, 1.1318359375, 0.31591796875, 0.338623046875, 1.654296875, -0.88037109375, -0.21484375, 1.45703125, 1.0380859375, -0.52294921875, -0.47802734375, 0.1650390625, 1.2392578125, -1.138671875, 0.56787109375, 1.318359375, 0.4287109375, 0.1981201171875, 2.4375, 0.281005859375, 0.89404296875, -0.1552734375, 0.6474609375, -0.08331298828125, 0.00740814208984375, -0.045501708984375, -0.578125, 2.02734375, 0.59228515625, 0.35693359375, 1.2919921875, 1.22265625, 1.0537109375, 0.145263671875, 1.05859375, -0.369140625, 0.207275390625, 0.78857421875, 0.599609375, 0.99072265625, 0.24462890625, 1.26953125, 0.08404541015625, 1.349609375, 0.73291015625, 1.3212890625, 0.388916015625, 1.0869140625, 0.9931640625, -1.5673828125, 0.0462646484375, 0.650390625, 0.253662109375, 0.58251953125, 1.8134765625, 0.8642578125, 2.591796875, 0.7314453125, 0.85986328125, 0.5615234375, 0.9296875, 0.04144287109375, 1.66015625, 1.99609375, 1.171875, 1.181640625, 1.5126953125, 0.0224456787109375, 0.58349609375, -1.4931640625, 0.81884765625, 0.732421875, -0.6455078125, -0.62451171875, 1.7802734375, 0.01526641845703125, -0.423095703125, 0.461669921875, 4.87890625, 1.2392578125, -0.6953125, 0.6689453125, 0.62451171875, -1.521484375, 1.7685546875, 
0.810546875, 0.65478515625, 0.26123046875, 1.6396484375, 0.87548828125, 1.7353515625, 2.046875, 1.5634765625, 0.69384765625, 1.375, 0.8916015625, 1.0107421875, 0.1304931640625, 2.009765625, 0.06402587890625, -0.08428955078125, 0.04351806640625, -1.7529296875, 2.02734375, 3.521484375, 0.404541015625, 1.6337890625, -0.276611328125, 0.8837890625, -0.1287841796875, 0.91064453125, 0.8193359375, 0.701171875, 0.036529541015625, 1.26171875, 1.0478515625, -0.1422119140625, 1.0634765625, 0.61083984375, 1.3505859375, 1.208984375, 0.57275390625, 1.3623046875, 2.267578125, 0.484375, 0.9150390625, 0.56787109375, -0.70068359375, 0.27587890625, -0.70654296875, 0.8466796875, 0.57568359375, 1.6162109375, 0.87939453125, 2.248046875, -0.5458984375, 1.7744140625, 1.328125, 1.232421875, 0.6806640625, 0.9365234375, 1.052734375, -1.08984375, 1.8330078125, -0.4033203125, 1.0673828125, 0.297607421875, 1.5703125, 1.67578125, 1.34765625, 2.8203125, 2.025390625, -0.48583984375, 0.7626953125, 0.01007843017578125, 1.435546875, 0.007205963134765625, 0.05157470703125, -0.9853515625, 0.26708984375, 1.16796875, 1.2041015625, 1.99609375, -0.07916259765625, 1.244140625, -0.32080078125, 0.6748046875, 0.419921875, 1.3212890625, 1.291015625, 0.599609375, 0.0550537109375, 0.9599609375, 0.93505859375, 0.111083984375, 1.302734375, 0.0833740234375, 2.244140625, 1.25390625, 1.6015625, 0.58349609375, 1.7568359375, -0.263427734375, -0.019866943359375, -0.24658203125, -0.1871337890625, 0.927734375, 0.62255859375, 0.275146484375, 0.79541015625, 1.1796875, 1.1767578125, -0.26123046875, -0.268310546875, 1.8994140625, 1.318359375, 2.1875, 0.2469482421875, 1.41015625, 0.03973388671875, 1.2685546875, 1.1025390625, 0.9560546875, 0.865234375, -1.92578125, 1.154296875, 0.389892578125, 1.130859375, 0.95947265625, 0.72314453125, 2.244140625, 0.048553466796875, 0.626953125, 0.42919921875, 0.82275390625, 0.311767578125, -0.320556640625, 0.01041412353515625, 0.1483154296875, 0.10809326171875, -0.3173828125, 1.1337890625, 
-0.8642578125, 1.4033203125, 0.048828125, 1.1787109375, 0.98779296875, 1.818359375, 1.1552734375, 0.6015625, 1.2392578125, -1.2685546875, 0.39208984375, 0.83251953125, 0.224365234375, 0.0019989013671875, 0.87548828125, 1.6572265625, 1.107421875, 0.434814453125, 1.8251953125, 0.442626953125, 1.2587890625, 0.09320068359375, -0.896484375, 1.8017578125, 1.451171875, -0.0755615234375, 0.6083984375, 2.06640625, 0.673828125, -0.33740234375, 0.192138671875, 0.21435546875, 0.80224609375, -1.490234375, 0.9501953125, 0.86083984375, -0.40283203125, 4.109375, 2.533203125, 1.2529296875, 0.8271484375, 0.225830078125, 1.0478515625, -1.9755859375, 0.841796875, 0.392822265625, 0.525390625, 0.33935546875, -0.79443359375, 0.71630859375, 0.97998046875, -0.175537109375, 0.97705078125, 1.705078125, 0.29638671875, 0.68359375, 0.54150390625, 0.435791015625, 0.99755859375, -0.369140625, 1.009765625, -0.140380859375, 0.426513671875, 0.189697265625, 1.8193359375, 1.1201171875, -0.5009765625, -0.331298828125, 0.759765625, -0.09442138671875, 0.74609375, -1.947265625, 1.3544921875, -3.935546875, 2.544921875, 1.359375, 0.1363525390625, 0.79296875, 0.79931640625, -0.3466796875, 1.1396484375, -0.33447265625, 2.0078125, -0.241455078125, 0.6318359375, 0.365234375, 0.296142578125, 0.830078125, 1.0458984375, 0.5830078125, 0.61572265625, 14.0703125, -2.0078125, -0.381591796875, 1.228515625, 0.08282470703125, -0.67822265625, -0.04339599609375, 0.397216796875, 0.1656494140625, 0.137451171875, 0.244873046875, 1.1611328125, -1.3818359375, 0.8447265625, 1.171875, 0.36328125, 0.252685546875, 0.1197509765625, 0.232177734375, -0.020172119140625, 0.64404296875, -0.01100921630859375, -1.9267578125, 0.222412109375, 0.56005859375, 1.3046875, 1.1630859375, 1.197265625, 1.02734375, 1.6806640625, -0.043731689453125, 1.4697265625, 0.81201171875, 1.5390625, 1.240234375, -0.7353515625, 1.828125, 1.115234375, 1.931640625, -0.517578125, 0.77880859375, 1.0546875, 0.95361328125, 3.42578125, 0.0160369873046875, 0.875, 
0.56005859375, 1.2421875, 1.986328125, 1.4814453125, 0.0948486328125, 1.115234375, 0.00665283203125, 2.09375, 0.3544921875, -0.52783203125, 1.2099609375, 0.45068359375, 0.65625, 0.1112060546875, 1.0751953125, -0.9521484375, -0.30029296875, 1.4462890625, 2.046875, 3.212890625, 1.68359375, 1.07421875, -0.5263671875, 0.74560546875, 1.37890625, 0.15283203125, 0.2440185546875, 0.62646484375, -0.1280517578125, 0.7646484375, -0.515625, -0.35693359375, 1.2958984375, 0.96923828125, 0.58935546875, 1.3701171875, 1.0673828125, 0.2337646484375, 0.93115234375, 0.66357421875, 6.0, 1.1025390625, -0.51708984375, -0.38330078125, 0.7197265625, 0.246826171875, -0.45166015625, 1.9521484375, 0.5546875, 0.08807373046875, 0.18505859375, 0.8857421875, -0.57177734375, 0.251708984375, 0.234375, 2.57421875, 0.9599609375, 0.5029296875, 0.10382080078125, 0.08331298828125, 0.66748046875, -0.349609375, 1.287109375, 0.259765625, 2.015625, 2.828125, -0.3095703125, -0.164306640625, -0.3408203125, 0.486572265625, 0.8466796875, 1.9130859375, 0.09088134765625, 0.66552734375, 0.00972747802734375, -0.83154296875, 1.755859375, 0.654296875, 0.173828125, 0.27587890625, -0.47607421875, -0.264404296875, 0.7529296875, 0.6533203125, 0.7275390625, 0.499755859375, 0.833984375, -0.44775390625, -0.05078125, -0.454833984375, 0.75439453125, 0.68505859375, 0.210693359375, -0.283935546875, -0.53564453125, 0.96826171875, 0.861328125, -3.33984375, -0.26171875, 0.77734375, 0.26513671875, -0.14111328125, -0.042236328125, -0.84814453125, 0.2137451171875, 0.94921875, 0.65185546875, -0.5380859375, 0.1529541015625, -0.360595703125, -0.0333251953125, -0.69189453125, 0.8974609375, 0.7109375, 0.81494140625, -0.259521484375, 1.1904296875, 0.62158203125, 1.345703125, 0.89404296875, 0.70556640625, 1.0673828125, 1.392578125, 0.5068359375, 0.962890625, 0.736328125, 1.55078125, 0.50390625, -0.398681640625, 2.361328125, 0.345947265625, -0.61962890625, 0.330078125, 0.75439453125, -0.673828125, -0.2379150390625, 1.5673828125, 1.369140625, 
0.1119384765625, -0.1834716796875, 1.4599609375, -0.77587890625, 0.5556640625, 0.09954833984375, 0.0285186767578125, 0.58935546875, -0.501953125, 0.212890625, 0.02679443359375, 0.1715087890625, 0.03466796875, -0.564453125, 2.029296875, 2.45703125, -0.72216796875, 2.138671875, 0.50830078125, -0.09356689453125, 0.230224609375, 1.6943359375, 1.5126953125, 0.39453125, 0.411376953125, 1.07421875, -0.8046875, 0.51416015625, 0.2271728515625, -0.283447265625, 0.38427734375, 0.73388671875, 0.6962890625, 1.4990234375, 0.02813720703125, 0.40478515625, 1.2451171875, 1.1162109375, -5.5703125, 0.76171875, 0.322021484375, 1.0361328125, 1.197265625, 0.1163330078125, 0.2425537109375, 1.5595703125, 1.5791015625, -0.0921630859375, 0.484619140625, 1.9052734375, 5.31640625, 1.6337890625, 0.95947265625, -0.1751708984375, 0.466552734375, 0.8330078125, 1.03125, 0.2044677734375, 0.31298828125, -1.1220703125, 0.5517578125, 0.93505859375, 0.45166015625, 1.951171875, 0.65478515625, 1.30859375, 1.0859375, 0.56494140625, 2.322265625, 0.242919921875, 1.81640625, -0.469970703125, -0.841796875, 0.90869140625, 1.5361328125, 0.923828125, 1.0595703125, 0.356689453125, -0.46142578125, 2.134765625, 1.3037109375, -0.32373046875, -9.2265625, 0.4521484375, 0.88037109375, -0.53955078125, 0.96484375, 0.7705078125, 0.84521484375, 1.580078125, -0.1448974609375, 0.7607421875, 1.0166015625, -0.086669921875, 1.611328125, 0.05938720703125, 0.5078125, 0.8427734375, 2.431640625, 0.66357421875, 3.203125, 0.132080078125, 0.461181640625, 0.779296875, 1.9482421875, 1.8720703125, 0.845703125, -1.3837890625, -0.138916015625, 0.35546875, 0.2457275390625, 0.75341796875, 1.828125, 1.4169921875, 0.60791015625, 1.0068359375, 1.109375, 0.484130859375, -0.302001953125, 0.4951171875, 0.802734375, 1.9482421875, 0.916015625, 0.1646728515625, 2.599609375, 1.7177734375, -0.2374267578125, 0.98046875, 0.39306640625, -1.1396484375, 1.6533203125, 0.375244140625], \\\"scores\\\": [0.09906005859375, 0.12213134765625, 0.05035400390625, 
0.16552734375, 0.0037384033203125, 0.010101318359375, 0.058197021484375, 0.10552978515625, 0.1141357421875, 0.115234375, 0.00444793701171875, 0.00812530517578125, 0.0343017578125, 0.009002685546875, 0.087158203125, 0.00818634033203125, 0.003238677978515625, 0.024169921875, 0.00702667236328125, 0.00936126708984375, 0.00632476806640625, 0.0293731689453125, 0.01800537109375, 0.0088348388671875, 0.013397216796875, 0.02557373046875, 0.12109375, 0.004413604736328125, 0.016571044921875, 0.00274658203125, 0.2142333984375, 0.29638671875, 0.16015625, 0.007843017578125, 0.007160186767578125, 0.00508880615234375, 0.054229736328125, 0.048736572265625, 0.026397705078125, 0.0265350341796875, 0.051361083984375, 0.018310546875, 0.00974273681640625, 0.0218963623046875, 0.216064453125, 0.021728515625, 0.0173797607421875, 0.004520416259765625], \\\"stoks\\\": [52, 52, 52, 52, 52, 41, 41, 374, 303, 303, 303, 427], \\\"time\\\": {\\\"begin_ms\\\": 640, \\\"end_ms\\\": 1140}}]}\"\n },\n {\n \"id\": \"7645a0d1-2e64-410d-83a8-b96040432e9a\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244957031,\n \"role\": \"AGENT\",\n \"type\": \"AGENT_MESSAGE\",\n \"message_text\": \"Hello!\",\n \"emotion_features\": \"{\\\"Admiration\\\": 0.044921875, \\\"Adoration\\\": 0.0253753662109375, \\\"Aesthetic Appreciation\\\": 0.03265380859375, \\\"Amusement\\\": 0.118408203125, \\\"Anger\\\": 0.06719970703125, \\\"Anxiety\\\": 0.0411376953125, \\\"Awe\\\": 0.03802490234375, \\\"Awkwardness\\\": 0.056549072265625, \\\"Boredom\\\": 0.04217529296875, \\\"Calmness\\\": 0.08709716796875, \\\"Concentration\\\": 0.070556640625, \\\"Confusion\\\": 0.06964111328125, \\\"Contemplation\\\": 0.0343017578125, \\\"Contempt\\\": 0.037689208984375, \\\"Contentment\\\": 0.059417724609375, \\\"Craving\\\": 0.01132965087890625, \\\"Desire\\\": 0.01406097412109375, \\\"Determination\\\": 0.1143798828125, \\\"Disappointment\\\": 0.051177978515625, \\\"Disgust\\\": 0.028594970703125, 
\\\"Distress\\\": 0.054901123046875, \\\"Doubt\\\": 0.04638671875, \\\"Ecstasy\\\": 0.0258026123046875, \\\"Embarrassment\\\": 0.0222015380859375, \\\"Empathic Pain\\\": 0.015777587890625, \\\"Entrancement\\\": 0.0160980224609375, \\\"Envy\\\": 0.0163421630859375, \\\"Excitement\\\": 0.129638671875, \\\"Fear\\\": 0.03125, \\\"Guilt\\\": 0.01483917236328125, \\\"Horror\\\": 0.0194549560546875, \\\"Interest\\\": 0.1341552734375, \\\"Joy\\\": 0.0738525390625, \\\"Love\\\": 0.0216522216796875, \\\"Nostalgia\\\": 0.0210418701171875, \\\"Pain\\\": 0.020721435546875, \\\"Pride\\\": 0.05499267578125, \\\"Realization\\\": 0.0728759765625, \\\"Relief\\\": 0.04052734375, \\\"Romance\\\": 0.0129241943359375, \\\"Sadness\\\": 0.0254669189453125, \\\"Satisfaction\\\": 0.07159423828125, \\\"Shame\\\": 0.01495361328125, \\\"Surprise (negative)\\\": 0.05560302734375, \\\"Surprise (positive)\\\": 0.07965087890625, \\\"Sympathy\\\": 0.022247314453125, \\\"Tiredness\\\": 0.0194549560546875, \\\"Triumph\\\": 0.04107666015625}\",\n \"metadata\": \"\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"63f657bf-ebac-4bc4-bbae-5a46d2babec4","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"741b2853-034c-43df-9eb0-7e4ff5d57dec","name":"List chats - default","request":{"urlPathTemplate":"/v0/evi/chats","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 1,\n \"total_pages\": 1,\n \"pagination_direction\": \"ASC\",\n \"chats_page\": [\n {\n \"id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"chat_group_id\": \"9fc18597-3567-42d5-94d6-935bde84bf2f\",\n \"status\": \"USER_ENDED\",\n \"start_timestamp\": 1716244940648,\n \"end_timestamp\": 1716244958546,\n \"event_count\": 3,\n \"metadata\": \"\",\n \"config\": {\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 0\n }\n }\n 
]\n}","headers":{"Content-Type":"application/json"}},"uuid":"741b2853-034c-43df-9eb0-7e4ff5d57dec","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"333ce95c-76c7-4621-aa72-bb0ed90fcf50","name":"List chat events - default","request":{"urlPathTemplate":"/v0/evi/chats/{id}","method":"GET","pathParameters":{"id":{"equalTo":"your-chat-id"}}},"response":{"status":200,"body":"{\n \"id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"chat_group_id\": \"9fc18597-3567-42d5-94d6-935bde84bf2f\",\n \"status\": \"USER_ENDED\",\n \"start_timestamp\": 1716244940648,\n \"pagination_direction\": \"ASC\",\n \"events_page\": [\n {\n \"id\": \"5d44bdbb-49a3-40fb-871d-32bf7e76efe7\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244940762,\n \"role\": \"SYSTEM\",\n \"type\": \"SYSTEM_PROMPT\",\n \"message_text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\",\n \"emotion_features\": \"\",\n \"metadata\": \"\"\n },\n {\n \"id\": \"5976ddf6-d093-4bb9-ba60-8f6c25832dde\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244956278,\n \"role\": \"USER\",\n \"type\": \"USER_MESSAGE\",\n \"message_text\": \"Hello.\",\n \"emotion_features\": \"{\\\"Admiration\\\": 0.09906005859375, \\\"Adoration\\\": 0.12213134765625, \\\"Aesthetic Appreciation\\\": 0.05035400390625, \\\"Amusement\\\": 0.16552734375, \\\"Anger\\\": 0.0037384033203125, \\\"Anxiety\\\": 0.010101318359375, \\\"Awe\\\": 0.058197021484375, \\\"Awkwardness\\\": 0.10552978515625, \\\"Boredom\\\": 0.1141357421875, \\\"Calmness\\\": 0.115234375, \\\"Concentration\\\": 0.00444793701171875, \\\"Confusion\\\": 0.0343017578125, \\\"Contemplation\\\": 0.00812530517578125, \\\"Contempt\\\": 0.009002685546875, \\\"Contentment\\\": 0.087158203125, \\\"Craving\\\": 0.00818634033203125, \\\"Desire\\\": 0.018310546875, \\\"Determination\\\": 0.003238677978515625, \\\"Disappointment\\\": 0.024169921875, \\\"Disgust\\\": 0.00702667236328125, \\\"Distress\\\": 0.00936126708984375, \\\"Doubt\\\": 0.00632476806640625, \\\"Ecstasy\\\": 0.0293731689453125, \\\"Embarrassment\\\": 0.01800537109375, \\\"Empathic Pain\\\": 0.0088348388671875, \\\"Entrancement\\\": 0.013397216796875, \\\"Envy\\\": 0.02557373046875, \\\"Excitement\\\": 0.12109375, \\\"Fear\\\": 0.004413604736328125, \\\"Guilt\\\": 0.016571044921875, \\\"Horror\\\": 0.00274658203125, \\\"Interest\\\": 0.2142333984375, \\\"Joy\\\": 0.29638671875, \\\"Love\\\": 0.16015625, \\\"Nostalgia\\\": 0.007843017578125, \\\"Pain\\\": 0.007160186767578125, \\\"Pride\\\": 0.00508880615234375, \\\"Realization\\\": 0.054229736328125, \\\"Relief\\\": 0.048736572265625, \\\"Romance\\\": 0.026397705078125, \\\"Sadness\\\": 0.0265350341796875, \\\"Satisfaction\\\": 0.051361083984375, \\\"Shame\\\": 0.00974273681640625, \\\"Surprise (negative)\\\": 
0.0218963623046875, \\\"Surprise (positive)\\\": 0.216064453125, \\\"Sympathy\\\": 0.021728515625, \\\"Tiredness\\\": 0.0173797607421875, \\\"Triumph\\\": 0.004520416259765625}\",\n \"metadata\": \"{\\\"segments\\\": [{\\\"content\\\": \\\"Hello.\\\", \\\"embedding\\\": [0.6181640625, 0.1763916015625, -30.921875, 1.2705078125, 0.927734375, 0.63720703125, 2.865234375, 0.1080322265625, 0.2978515625, 1.0107421875, 1.34375, 0.74560546875, 0.416259765625, 0.99462890625, -0.333740234375, 0.361083984375, -1.388671875, 1.0107421875, 1.3173828125, 0.55615234375, 0.541015625, -0.1837158203125, 1.697265625, 0.228515625, 2.087890625, -0.311767578125, 0.053680419921875, 1.3349609375, 0.95068359375, 0.00441741943359375, 0.705078125, 1.8916015625, -0.939453125, 0.93701171875, -0.28955078125, 1.513671875, 0.5595703125, 1.0126953125, -0.1624755859375, 1.4072265625, -0.28857421875, -0.4560546875, -0.1500244140625, -0.1102294921875, -0.222412109375, 0.8779296875, 1.275390625, 1.6689453125, 0.80712890625, -0.34814453125, -0.325439453125, 0.412841796875, 0.81689453125, 0.55126953125, 1.671875, 0.6611328125, 0.7451171875, 1.50390625, 1.0224609375, -1.671875, 0.7373046875, 2.1328125, 2.166015625, 0.41015625, -0.127685546875, 1.9345703125, -4.2734375, 0.332275390625, 0.26171875, 0.76708984375, 0.2685546875, 0.468017578125, 1.208984375, -1.517578125, 1.083984375, 0.84814453125, 1.0244140625, -0.0072174072265625, 1.34375, 1.0712890625, 1.517578125, -0.52001953125, 0.59228515625, 0.8154296875, -0.951171875, -0.07757568359375, 1.3330078125, 1.125, 0.61181640625, 1.494140625, 0.357421875, 1.1796875, 1.482421875, 0.8046875, 0.1536865234375, 1.8076171875, 0.68115234375, -15.171875, 1.2294921875, 0.319091796875, 0.499755859375, 1.5771484375, 0.94677734375, -0.2490234375, 0.88525390625, 3.47265625, 0.75927734375, 0.71044921875, 1.2333984375, 1.4169921875, -0.56640625, -1.8095703125, 1.37109375, 0.428955078125, 1.89453125, -0.39013671875, 0.1734619140625, 1.5595703125, -1.2294921875, 2.552734375, 
0.58349609375, 0.2156982421875, -0.00984954833984375, -0.6865234375, -0.0272979736328125, -0.2264404296875, 2.853515625, 1.3896484375, 0.52978515625, 0.783203125, 3.0390625, 0.75537109375, 0.219970703125, 0.384521484375, 0.385986328125, 2.0546875, -0.10443115234375, 1.5146484375, 1.4296875, 1.9716796875, 1.1318359375, 0.31591796875, 0.338623046875, 1.654296875, -0.88037109375, -0.21484375, 1.45703125, 1.0380859375, -0.52294921875, -0.47802734375, 0.1650390625, 1.2392578125, -1.138671875, 0.56787109375, 1.318359375, 0.4287109375, 0.1981201171875, 2.4375, 0.281005859375, 0.89404296875, -0.1552734375, 0.6474609375, -0.08331298828125, 0.00740814208984375, -0.045501708984375, -0.578125, 2.02734375, 0.59228515625, 0.35693359375, 1.2919921875, 1.22265625, 1.0537109375, 0.145263671875, 1.05859375, -0.369140625, 0.207275390625, 0.78857421875, 0.599609375, 0.99072265625, 0.24462890625, 1.26953125, 0.08404541015625, 1.349609375, 0.73291015625, 1.3212890625, 0.388916015625, 1.0869140625, 0.9931640625, -1.5673828125, 0.0462646484375, 0.650390625, 0.253662109375, 0.58251953125, 1.8134765625, 0.8642578125, 2.591796875, 0.7314453125, 0.85986328125, 0.5615234375, 0.9296875, 0.04144287109375, 1.66015625, 1.99609375, 1.171875, 1.181640625, 1.5126953125, 0.0224456787109375, 0.58349609375, -1.4931640625, 0.81884765625, 0.732421875, -0.6455078125, -0.62451171875, 1.7802734375, 0.01526641845703125, -0.423095703125, 0.461669921875, 4.87890625, 1.2392578125, -0.6953125, 0.6689453125, 0.62451171875, -1.521484375, 1.7685546875, 0.810546875, 0.65478515625, 0.26123046875, 1.6396484375, 0.87548828125, 1.7353515625, 2.046875, 1.5634765625, 0.69384765625, 1.375, 0.8916015625, 1.0107421875, 0.1304931640625, 2.009765625, 0.06402587890625, -0.08428955078125, 0.04351806640625, -1.7529296875, 2.02734375, 3.521484375, 0.404541015625, 1.6337890625, -0.276611328125, 0.8837890625, -0.1287841796875, 0.91064453125, 0.8193359375, 0.701171875, 0.036529541015625, 1.26171875, 1.0478515625, -0.1422119140625, 
1.0634765625, 0.61083984375, 1.3505859375, 1.208984375, 0.57275390625, 1.3623046875, 2.267578125, 0.484375, 0.9150390625, 0.56787109375, -0.70068359375, 0.27587890625, -0.70654296875, 0.8466796875, 0.57568359375, 1.6162109375, 0.87939453125, 2.248046875, -0.5458984375, 1.7744140625, 1.328125, 1.232421875, 0.6806640625, 0.9365234375, 1.052734375, -1.08984375, 1.8330078125, -0.4033203125, 1.0673828125, 0.297607421875, 1.5703125, 1.67578125, 1.34765625, 2.8203125, 2.025390625, -0.48583984375, 0.7626953125, 0.01007843017578125, 1.435546875, 0.007205963134765625, 0.05157470703125, -0.9853515625, 0.26708984375, 1.16796875, 1.2041015625, 1.99609375, -0.07916259765625, 1.244140625, -0.32080078125, 0.6748046875, 0.419921875, 1.3212890625, 1.291015625, 0.599609375, 0.0550537109375, 0.9599609375, 0.93505859375, 0.111083984375, 1.302734375, 0.0833740234375, 2.244140625, 1.25390625, 1.6015625, 0.58349609375, 1.7568359375, -0.263427734375, -0.019866943359375, -0.24658203125, -0.1871337890625, 0.927734375, 0.62255859375, 0.275146484375, 0.79541015625, 1.1796875, 1.1767578125, -0.26123046875, -0.268310546875, 1.8994140625, 1.318359375, 2.1875, 0.2469482421875, 1.41015625, 0.03973388671875, 1.2685546875, 1.1025390625, 0.9560546875, 0.865234375, -1.92578125, 1.154296875, 0.389892578125, 1.130859375, 0.95947265625, 0.72314453125, 2.244140625, 0.048553466796875, 0.626953125, 0.42919921875, 0.82275390625, 0.311767578125, -0.320556640625, 0.01041412353515625, 0.1483154296875, 0.10809326171875, -0.3173828125, 1.1337890625, -0.8642578125, 1.4033203125, 0.048828125, 1.1787109375, 0.98779296875, 1.818359375, 1.1552734375, 0.6015625, 1.2392578125, -1.2685546875, 0.39208984375, 0.83251953125, 0.224365234375, 0.0019989013671875, 0.87548828125, 1.6572265625, 1.107421875, 0.434814453125, 1.8251953125, 0.442626953125, 1.2587890625, 0.09320068359375, -0.896484375, 1.8017578125, 1.451171875, -0.0755615234375, 0.6083984375, 2.06640625, 0.673828125, -0.33740234375, 0.192138671875, 0.21435546875, 
0.80224609375, -1.490234375, 0.9501953125, 0.86083984375, -0.40283203125, 4.109375, 2.533203125, 1.2529296875, 0.8271484375, 0.225830078125, 1.0478515625, -1.9755859375, 0.841796875, 0.392822265625, 0.525390625, 0.33935546875, -0.79443359375, 0.71630859375, 0.97998046875, -0.175537109375, 0.97705078125, 1.705078125, 0.29638671875, 0.68359375, 0.54150390625, 0.435791015625, 0.99755859375, -0.369140625, 1.009765625, -0.140380859375, 0.426513671875, 0.189697265625, 1.8193359375, 1.1201171875, -0.5009765625, -0.331298828125, 0.759765625, -0.09442138671875, 0.74609375, -1.947265625, 1.3544921875, -3.935546875, 2.544921875, 1.359375, 0.1363525390625, 0.79296875, 0.79931640625, -0.3466796875, 1.1396484375, -0.33447265625, 2.0078125, -0.241455078125, 0.6318359375, 0.365234375, 0.296142578125, 0.830078125, 1.0458984375, 0.5830078125, 0.61572265625, 14.0703125, -2.0078125, -0.381591796875, 1.228515625, 0.08282470703125, -0.67822265625, -0.04339599609375, 0.397216796875, 0.1656494140625, 0.137451171875, 0.244873046875, 1.1611328125, -1.3818359375, 0.8447265625, 1.171875, 0.36328125, 0.252685546875, 0.1197509765625, 0.232177734375, -0.020172119140625, 0.64404296875, -0.01100921630859375, -1.9267578125, 0.222412109375, 0.56005859375, 1.3046875, 1.1630859375, 1.197265625, 1.02734375, 1.6806640625, -0.043731689453125, 1.4697265625, 0.81201171875, 1.5390625, 1.240234375, -0.7353515625, 1.828125, 1.115234375, 1.931640625, -0.517578125, 0.77880859375, 1.0546875, 0.95361328125, 3.42578125, 0.0160369873046875, 0.875, 0.56005859375, 1.2421875, 1.986328125, 1.4814453125, 0.0948486328125, 1.115234375, 0.00665283203125, 2.09375, 0.3544921875, -0.52783203125, 1.2099609375, 0.45068359375, 0.65625, 0.1112060546875, 1.0751953125, -0.9521484375, -0.30029296875, 1.4462890625, 2.046875, 3.212890625, 1.68359375, 1.07421875, -0.5263671875, 0.74560546875, 1.37890625, 0.15283203125, 0.2440185546875, 0.62646484375, -0.1280517578125, 0.7646484375, -0.515625, -0.35693359375, 1.2958984375, 
0.96923828125, 0.58935546875, 1.3701171875, 1.0673828125, 0.2337646484375, 0.93115234375, 0.66357421875, 6.0, 1.1025390625, -0.51708984375, -0.38330078125, 0.7197265625, 0.246826171875, -0.45166015625, 1.9521484375, 0.5546875, 0.08807373046875, 0.18505859375, 0.8857421875, -0.57177734375, 0.251708984375, 0.234375, 2.57421875, 0.9599609375, 0.5029296875, 0.10382080078125, 0.08331298828125, 0.66748046875, -0.349609375, 1.287109375, 0.259765625, 2.015625, 2.828125, -0.3095703125, -0.164306640625, -0.3408203125, 0.486572265625, 0.8466796875, 1.9130859375, 0.09088134765625, 0.66552734375, 0.00972747802734375, -0.83154296875, 1.755859375, 0.654296875, 0.173828125, 0.27587890625, -0.47607421875, -0.264404296875, 0.7529296875, 0.6533203125, 0.7275390625, 0.499755859375, 0.833984375, -0.44775390625, -0.05078125, -0.454833984375, 0.75439453125, 0.68505859375, 0.210693359375, -0.283935546875, -0.53564453125, 0.96826171875, 0.861328125, -3.33984375, -0.26171875, 0.77734375, 0.26513671875, -0.14111328125, -0.042236328125, -0.84814453125, 0.2137451171875, 0.94921875, 0.65185546875, -0.5380859375, 0.1529541015625, -0.360595703125, -0.0333251953125, -0.69189453125, 0.8974609375, 0.7109375, 0.81494140625, -0.259521484375, 1.1904296875, 0.62158203125, 1.345703125, 0.89404296875, 0.70556640625, 1.0673828125, 1.392578125, 0.5068359375, 0.962890625, 0.736328125, 1.55078125, 0.50390625, -0.398681640625, 2.361328125, 0.345947265625, -0.61962890625, 0.330078125, 0.75439453125, -0.673828125, -0.2379150390625, 1.5673828125, 1.369140625, 0.1119384765625, -0.1834716796875, 1.4599609375, -0.77587890625, 0.5556640625, 0.09954833984375, 0.0285186767578125, 0.58935546875, -0.501953125, 0.212890625, 0.02679443359375, 0.1715087890625, 0.03466796875, -0.564453125, 2.029296875, 2.45703125, -0.72216796875, 2.138671875, 0.50830078125, -0.09356689453125, 0.230224609375, 1.6943359375, 1.5126953125, 0.39453125, 0.411376953125, 1.07421875, -0.8046875, 0.51416015625, 0.2271728515625, -0.283447265625, 
0.38427734375, 0.73388671875, 0.6962890625, 1.4990234375, 0.02813720703125, 0.40478515625, 1.2451171875, 1.1162109375, -5.5703125, 0.76171875, 0.322021484375, 1.0361328125, 1.197265625, 0.1163330078125, 0.2425537109375, 1.5595703125, 1.5791015625, -0.0921630859375, 0.484619140625, 1.9052734375, 5.31640625, 1.6337890625, 0.95947265625, -0.1751708984375, 0.466552734375, 0.8330078125, 1.03125, 0.2044677734375, 0.31298828125, -1.1220703125, 0.5517578125, 0.93505859375, 0.45166015625, 1.951171875, 0.65478515625, 1.30859375, 1.0859375, 0.56494140625, 2.322265625, 0.242919921875, 1.81640625, -0.469970703125, -0.841796875, 0.90869140625, 1.5361328125, 0.923828125, 1.0595703125, 0.356689453125, -0.46142578125, 2.134765625, 1.3037109375, -0.32373046875, -9.2265625, 0.4521484375, 0.88037109375, -0.53955078125, 0.96484375, 0.7705078125, 0.84521484375, 1.580078125, -0.1448974609375, 0.7607421875, 1.0166015625, -0.086669921875, 1.611328125, 0.05938720703125, 0.5078125, 0.8427734375, 2.431640625, 0.66357421875, 3.203125, 0.132080078125, 0.461181640625, 0.779296875, 1.9482421875, 1.8720703125, 0.845703125, -1.3837890625, -0.138916015625, 0.35546875, 0.2457275390625, 0.75341796875, 1.828125, 1.4169921875, 0.60791015625, 1.0068359375, 1.109375, 0.484130859375, -0.302001953125, 0.4951171875, 0.802734375, 1.9482421875, 0.916015625, 0.1646728515625, 2.599609375, 1.7177734375, -0.2374267578125, 0.98046875, 0.39306640625, -1.1396484375, 1.6533203125, 0.375244140625], \\\"scores\\\": [0.09906005859375, 0.12213134765625, 0.05035400390625, 0.16552734375, 0.0037384033203125, 0.010101318359375, 0.058197021484375, 0.10552978515625, 0.1141357421875, 0.115234375, 0.00444793701171875, 0.00812530517578125, 0.0343017578125, 0.009002685546875, 0.087158203125, 0.00818634033203125, 0.003238677978515625, 0.024169921875, 0.00702667236328125, 0.00936126708984375, 0.00632476806640625, 0.0293731689453125, 0.01800537109375, 0.0088348388671875, 0.013397216796875, 0.02557373046875, 0.12109375, 
0.004413604736328125, 0.016571044921875, 0.00274658203125, 0.2142333984375, 0.29638671875, 0.16015625, 0.007843017578125, 0.007160186767578125, 0.00508880615234375, 0.054229736328125, 0.048736572265625, 0.026397705078125, 0.0265350341796875, 0.051361083984375, 0.018310546875, 0.00974273681640625, 0.0218963623046875, 0.216064453125, 0.021728515625, 0.0173797607421875, 0.004520416259765625], \\\"stoks\\\": [52, 52, 52, 52, 52, 41, 41, 374, 303, 303, 303, 427], \\\"time\\\": {\\\"begin_ms\\\": 640, \\\"end_ms\\\": 1140}}]}\"\n },\n {\n \"id\": \"7645a0d1-2e64-410d-83a8-b96040432e9a\",\n \"chat_id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"timestamp\": 1716244957031,\n \"role\": \"AGENT\",\n \"type\": \"AGENT_MESSAGE\",\n \"message_text\": \"Hello!\",\n \"emotion_features\": \"{\\\"Admiration\\\": 0.044921875, \\\"Adoration\\\": 0.0253753662109375, \\\"Aesthetic Appreciation\\\": 0.03265380859375, \\\"Amusement\\\": 0.118408203125, \\\"Anger\\\": 0.06719970703125, \\\"Anxiety\\\": 0.0411376953125, \\\"Awe\\\": 0.03802490234375, \\\"Awkwardness\\\": 0.056549072265625, \\\"Boredom\\\": 0.04217529296875, \\\"Calmness\\\": 0.08709716796875, \\\"Concentration\\\": 0.070556640625, \\\"Confusion\\\": 0.06964111328125, \\\"Contemplation\\\": 0.0343017578125, \\\"Contempt\\\": 0.037689208984375, \\\"Contentment\\\": 0.059417724609375, \\\"Craving\\\": 0.01132965087890625, \\\"Desire\\\": 0.01406097412109375, \\\"Determination\\\": 0.1143798828125, \\\"Disappointment\\\": 0.051177978515625, \\\"Disgust\\\": 0.028594970703125, \\\"Distress\\\": 0.054901123046875, \\\"Doubt\\\": 0.04638671875, \\\"Ecstasy\\\": 0.0258026123046875, \\\"Embarrassment\\\": 0.0222015380859375, \\\"Empathic Pain\\\": 0.015777587890625, \\\"Entrancement\\\": 0.0160980224609375, \\\"Envy\\\": 0.0163421630859375, \\\"Excitement\\\": 0.129638671875, \\\"Fear\\\": 0.03125, \\\"Guilt\\\": 0.01483917236328125, \\\"Horror\\\": 0.0194549560546875, \\\"Interest\\\": 0.1341552734375, \\\"Joy\\\": 
0.0738525390625, \\\"Love\\\": 0.0216522216796875, \\\"Nostalgia\\\": 0.0210418701171875, \\\"Pain\\\": 0.020721435546875, \\\"Pride\\\": 0.05499267578125, \\\"Realization\\\": 0.0728759765625, \\\"Relief\\\": 0.04052734375, \\\"Romance\\\": 0.0129241943359375, \\\"Sadness\\\": 0.0254669189453125, \\\"Satisfaction\\\": 0.07159423828125, \\\"Shame\\\": 0.01495361328125, \\\"Surprise (negative)\\\": 0.05560302734375, \\\"Surprise (positive)\\\": 0.07965087890625, \\\"Sympathy\\\": 0.022247314453125, \\\"Tiredness\\\": 0.0194549560546875, \\\"Triumph\\\": 0.04107666015625}\",\n \"metadata\": \"\"\n }\n ],\n \"page_number\": 0,\n \"page_size\": 3,\n \"total_pages\": 1,\n \"end_timestamp\": 1716244958546,\n \"metadata\": \"\",\n \"config\": {\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 0\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"333ce95c-76c7-4621-aa72-bb0ed90fcf50","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"1c210e60-fc3e-4020-ba31-155c211461a5","name":"Get chat audio - default","request":{"urlPathTemplate":"/v0/evi/chats/{id}/audio","method":"GET","pathParameters":{"id":{"equalTo":"your-chat-id"}}},"response":{"status":200,"body":"{\n \"id\": \"470a49f6-1dec-4afe-8b61-035d3b2d63b0\",\n \"user_id\": \"e6235940-cfda-3988-9147-ff531627cf42\",\n \"status\": \"COMPLETE\",\n \"filename\": \"e6235940-cfda-3988-9147-ff531627cf42/470a49f6-1dec-4afe-8b61-035d3b2d63b0/reconstructed_audio.mp4\",\n \"modified_at\": 1729875432555,\n \"signed_audio_url\": \"https://storage.googleapis.com/...etc.\",\n \"signed_url_expiration_timestamp_millis\": 1730232816964\n}","headers":{"Content-Type":"application/json"}},"uuid":"1c210e60-fc3e-4020-ba31-155c211461a5","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"95990ecd-a7f2-495c-84ec-e0b2f0f1e471","name":"List configs - 
default","request":{"urlPathTemplate":"/v0/evi/configs","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 1,\n \"total_pages\": 1,\n \"configs_page\": [\n {\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 0,\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": false\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n }\n 
]\n}","headers":{"Content-Type":"application/json"}},"uuid":"95990ecd-a7f2-495c-84ec-e0b2f0f1e471","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"73a53d69-e4fb-44c0-a4cc-3ebdee8e8c36","name":"Create config - default","request":{"urlPathTemplate":"/v0/evi/configs","method":"POST"},"response":{"status":201,"body":"{\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 0,\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1715275452390,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": false\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"73a53d69-e4fb-44c0-a4cc-3ebdee8e8c36","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"9fa7c906-3213-4358-9fd3-fb98a80ccff9","name":"List config versions - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}","method":"GET","pathParameters":{"id":{"equalTo":"your-config-id"}}},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 10,\n \"total_pages\": 1,\n \"configs_page\": [\n {\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 0,\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1715275452390,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. 
Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": false\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"9fa7c906-3213-4358-9fd3-fb98a80ccff9","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"66129a1a-71df-4899-a702-a2582339ad92","name":"Create config version - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}","method":"POST","pathParameters":{"id":{"equalTo":"your-config-id"}}},"response":{"status":201,"body":"{\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 1,\n \"version_description\": \"This is an updated version of the Weather Assistant Config.\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1722642242998,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n 
\"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": true\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"66129a1a-71df-4899-a702-a2582339ad92","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"96ab709e-8ce5-42ad-8930-11174405a07f","name":"Delete config - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}","method":"DELETE","pathParameters":{"id":{"equalTo":"your-config-id"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"96ab709e-8ce5-42ad-8930-11174405a07f","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"ac036d7f-9200-454a-ad8b-058cedce8018","name":"Update config name - 
default","request":{"urlPathTemplate":"/v0/evi/configs/{id}","method":"PATCH","pathParameters":{"id":{"equalTo":"your-config-id"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"ac036d7f-9200-454a-ad8b-058cedce8018","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"336e4516-6577-4544-8e15-559222ec726d","name":"Get config version - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}/version/{version}","method":"GET","pathParameters":{"id":{"equalTo":"your-config-id"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"{\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 1,\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Config\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1715275452390,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": false\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"336e4516-6577-4544-8e15-559222ec726d","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"3631f032-af11-4788-9ada-63c97ec90d03","name":"Delete config version - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}/version/{version}","method":"DELETE","pathParameters":{"id":{"equalTo":"your-config-id"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"3631f032-af11-4788-9ada-63c97ec90d03","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"4a8a419e-4772-4077-95b5-281c2bd2851b","name":"Update config description - default","request":{"urlPathTemplate":"/v0/evi/configs/{id}/version/{version}","method":"PATCH","pathParameters":{"id":{"equalTo":"your-config-id"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"{\n \"id\": \"1b60e1a0-cc59-424a-8d2c-189d354db3f3\",\n \"version\": 1,\n \"version_description\": \"This is an updated version_description.\",\n \"name\": 
\"Weather Assistant Config\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1715275452390,\n \"evi_version\": \"3\",\n \"prompt\": {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n },\n \"voice\": {\n \"provider\": \"HUME_AI\",\n \"name\": \"Ava Song\",\n \"id\": \"5bb7de05-c8fe-426a-8fcc-ba4fc4ce9f9c\"\n },\n \"language_model\": {\n \"model_provider\": \"ANTHROPIC\",\n \"model_resource\": \"claude-3-7-sonnet-latest\",\n \"temperature\": 1\n },\n \"ellm_model\": {\n \"allow_short_responses\": false\n },\n \"tools\": [],\n \"builtin_tools\": [],\n \"event_messages\": {\n \"on_new_chat\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_inactivity_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n },\n \"on_max_duration_timeout\": {\n \"enabled\": false,\n \"text\": \"\"\n }\n },\n \"timeouts\": {\n \"inactivity\": {\n \"enabled\": true,\n \"duration_secs\": 600\n },\n \"max_duration\": {\n \"enabled\": true,\n \"duration_secs\": 1800\n }\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"4a8a419e-4772-4077-95b5-281c2bd2851b","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"3760f9c4-fbaa-40e9-9770-35af4592adb1","name":"List prompts - default","request":{"urlPathTemplate":"/v0/evi/prompts","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 2,\n \"total_pages\": 1,\n \"prompts_page\": [\n {\n \"id\": 
\"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n },\n {\n \"id\": \"616b2b4c-a096-4445-9c23-64058b564fc2\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Web Search Assistant Prompt\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"text\": \"You are an AI web search assistant designed to help users find accurate and relevant information on the web. Respond to user queries promptly, using the built-in web search tool to retrieve up-to-date results. Present information clearly and concisely, summarizing key points where necessary. Use simple language and avoid technical jargon. 
If needed, provide helpful tips for refining search queries to obtain better results.\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"3760f9c4-fbaa-40e9-9770-35af4592adb1","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"1fab19ce-cf01-4f16-b221-518ebe235e05","name":"Create prompt - default","request":{"urlPathTemplate":"/v0/evi/prompts","method":"POST"},"response":{"status":201,"body":"{\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": null,\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1722633247488,\n \"modified_on\": 1722633247488,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"1fab19ce-cf01-4f16-b221-518ebe235e05","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"818fa71c-38b4-45da-92f3-167567201251","name":"List prompt versions - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}","method":"GET","pathParameters":{"id":{"equalTo":"your-prompt-id"}}},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 10,\n \"total_pages\": 1,\n \"prompts_page\": [\n {\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1722633247488,\n \"modified_on\": 1722633247488,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. 
Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. Include helpful tips if severe weather is expected.\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"818fa71c-38b4-45da-92f3-167567201251","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"b2978df3-51e4-4d99-b160-6287c20dda6f","name":"Create prompt version - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}","method":"POST","pathParameters":{"id":{"equalTo":"your-prompt-id"}}},"response":{"status":201,"body":"{\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"This is an updated version of the Weather Assistant Prompt.\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1722633247488,\n \"modified_on\": 1722635140150,\n \"text\": \"You are an updated version of an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"b2978df3-51e4-4d99-b160-6287c20dda6f","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"722d1802-e538-4d83-9720-79ff982e0011","name":"Delete prompt - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}","method":"DELETE","pathParameters":{"id":{"equalTo":"your-prompt-id"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"722d1802-e538-4d83-9720-79ff982e0011","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"6e0c3636-6d00-4e26-92d7-40460ef14c6c","name":"Update prompt name - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}","method":"PATCH","pathParameters":{"id":{"equalTo":"your-prompt-id"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"6e0c3636-6d00-4e26-92d7-40460ef14c6c","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"3812aa90-4c73-4d5b-a38b-d5ca8efde008","name":"Get prompt version - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}/version/{version}","method":"GET","pathParameters":{"id":{"equalTo":"your-prompt-id"},"version":{"equalTo":"0"}}},"response":{"status":200,"body":"{\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"\",\n \"name\": \"Weather Assistant Prompt\",\n \"created_on\": 1722633247488,\n \"modified_on\": 1722633247488,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"3812aa90-4c73-4d5b-a38b-d5ca8efde008","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"27f84b39-bfde-4b0f-a49e-fbd93767a180","name":"Delete prompt version - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}/version/{version}","method":"DELETE","pathParameters":{"id":{"equalTo":"your-prompt-id"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"27f84b39-bfde-4b0f-a49e-fbd93767a180","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"760d3795-9a2c-4a38-940a-6b7459dff285","name":"Update prompt description - default","request":{"urlPathTemplate":"/v0/evi/prompts/{id}/version/{version}","method":"PATCH","pathParameters":{"id":{"equalTo":"your-prompt-id"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"{\n \"id\": \"af699d45-2985-42cc-91b9-af9e5da3bac5\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"This is an updated version_description.\",\n \"name\": \"string\",\n \"created_on\": 1722633247488,\n \"modified_on\": 1722634770585,\n \"text\": \"You are an AI weather assistant providing users with accurate and up-to-date weather information. Respond to user queries concisely and clearly. Use simple language and avoid technical jargon. Provide temperature, precipitation, wind conditions, and any weather alerts. 
Include helpful tips if severe weather is expected.\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"760d3795-9a2c-4a38-940a-6b7459dff285","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"76070823-253f-4b18-9c68-a80f6d2373ee","name":"List tools - default","request":{"urlPathTemplate":"/v0/evi/tools","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 2,\n \"total_pages\": 1,\n \"tools_page\": [\n {\n \"tool_type\": \"FUNCTION\",\n \"id\": \"d20827af-5d8d-4f66-b6b9-ce2e3e1ea2b2\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches user's current location.\",\n \"name\": \"get_current_location\",\n \"created_on\": 1715267200693,\n \"modified_on\": 1715267200693,\n \"fallback_content\": \"Unable to fetch location.\",\n \"description\": \"Fetches user's current location.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }}, \\\"required\\\": [\\\"location\\\"] }\"\n },\n {\n \"tool_type\": \"FUNCTION\",\n \"id\": \"4442f3ea-9038-40e3-a2ce-1522b7de770f\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current weather and uses celsius or fahrenheit based on location of user.\",\n \"name\": \"get_current_weather\",\n \"created_on\": 1715266126705,\n \"modified_on\": 1715266126705,\n \"fallback_content\": \"Unable to fetch location.\",\n \"description\": \"Fetches current weather and uses celsius or fahrenheit based on location of user.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. 
San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\"], \\\"description\\\": \\\"The temperature unit to use. Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"76070823-253f-4b18-9c68-a80f6d2373ee","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"7dc71f1b-3d70-4527-a21f-326f7db77dec","name":"Create tool - default","request":{"urlPathTemplate":"/v0/evi/tools","method":"POST"},"response":{"status":201,"body":"{\n \"tool_type\": \"FUNCTION\",\n \"id\": \"aa9b71c4-723c-47ff-9f83-1a1829e74376\",\n \"version\": 0,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current weather and uses celsius or fahrenheit based on location of user.\",\n \"name\": \"get_current_weather\",\n \"created_on\": 1715275452390,\n \"modified_on\": 1715275452390,\n \"fallback_content\": \"Unable to fetch current weather.\",\n \"description\": \"This tool is for getting the current weather.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\"], \\\"description\\\": \\\"The temperature unit to use. 
Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"7dc71f1b-3d70-4527-a21f-326f7db77dec","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"97f54c38-2fe1-4344-9355-88765ad39e92","name":"List tool versions - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}","method":"GET","pathParameters":{"id":{"equalTo":"your-tool-id"}}},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 10,\n \"total_pages\": 1,\n \"tools_page\": [\n {\n \"tool_type\": \"FUNCTION\",\n \"id\": \"00183a3f-79ba-413d-9f3b-609864268bea\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current weather and uses celsius, fahrenheit, or kelvin based on location of user.\",\n \"name\": \"get_current_weather\",\n \"created_on\": 1715277014228,\n \"modified_on\": 1715277602313,\n \"fallback_content\": \"Unable to fetch current weather.\",\n \"description\": \"This tool is for getting the current weather.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\", \\\"kelvin\\\"], \\\"description\\\": \\\"The temperature unit to use. 
Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"97f54c38-2fe1-4344-9355-88765ad39e92","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"1230a7c7-82ee-4940-98d3-3f0e1acc2cba","name":"Create tool version - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}","method":"POST","pathParameters":{"id":{"equalTo":"your-tool-id"}}},"response":{"status":201,"body":"{\n \"tool_type\": \"FUNCTION\",\n \"id\": \"00183a3f-79ba-413d-9f3b-609864268bea\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current weather and uses celsius, fahrenheit, or kelvin based on location of user.\",\n \"name\": \"get_current_weather\",\n \"created_on\": 1715277014228,\n \"modified_on\": 1715277602313,\n \"fallback_content\": \"Unable to fetch current weather.\",\n \"description\": \"This tool is for getting the current weather.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\", \\\"kelvin\\\"], \\\"description\\\": \\\"The temperature unit to use. 
Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"1230a7c7-82ee-4940-98d3-3f0e1acc2cba","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"6e58cf98-dbd8-465b-a6cc-53941c38f006","name":"Delete tool - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}","method":"DELETE","pathParameters":{"id":{"equalTo":"your-tool-id"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"6e58cf98-dbd8-465b-a6cc-53941c38f006","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"7bb920f0-9b75-4374-bdfb-540a599f3fce","name":"Update tool name - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}","method":"PATCH","pathParameters":{"id":{"equalTo":"your-tool-id"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"7bb920f0-9b75-4374-bdfb-540a599f3fce","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"f4fe78ec-6b74-402d-8aef-0cbd04c6a473","name":"Get tool version - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}/version/{version}","method":"GET","pathParameters":{"id":{"equalTo":"your-tool-id"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"{\n \"tool_type\": \"FUNCTION\",\n \"id\": \"00183a3f-79ba-413d-9f3b-609864268bea\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current weather and uses celsius, fahrenheit, or kelvin based on location of user.\",\n \"name\": \"string\",\n \"created_on\": 1715277014228,\n \"modified_on\": 1715277602313,\n \"fallback_content\": \"Unable to fetch current weather.\",\n \"description\": \"This tool is for getting the current weather.\",\n \"parameters\": \"{ \\\"type\\\": 
\\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\", \\\"kelvin\\\"], \\\"description\\\": \\\"The temperature unit to use. Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"f4fe78ec-6b74-402d-8aef-0cbd04c6a473","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"74ef73ce-49e2-492e-a4cb-ea6a1dc9b948","name":"Delete tool version - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}/version/{version}","method":"DELETE","pathParameters":{"id":{"equalTo":"your-tool-id"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"74ef73ce-49e2-492e-a4cb-ea6a1dc9b948","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"82d8c062-5280-4378-afad-8057ebea037a","name":"Update tool description - default","request":{"urlPathTemplate":"/v0/evi/tools/{id}/version/{version}","method":"PATCH","pathParameters":{"id":{"equalTo":"your-tool-id"},"version":{"equalTo":"1"}}},"response":{"status":200,"body":"{\n \"tool_type\": \"FUNCTION\",\n \"id\": \"00183a3f-79ba-413d-9f3b-609864268bea\",\n \"version\": 1,\n \"version_type\": \"FIXED\",\n \"version_description\": \"Fetches current temperature, precipitation, wind speed, AQI, and other weather conditions. 
Uses Celsius, Fahrenheit, or kelvin depending on user's region.\",\n \"name\": \"string\",\n \"created_on\": 1715277014228,\n \"modified_on\": 1715277602313,\n \"fallback_content\": \"Unable to fetch current weather.\",\n \"description\": \"This tool is for getting the current weather.\",\n \"parameters\": \"{ \\\"type\\\": \\\"object\\\", \\\"properties\\\": { \\\"location\\\": { \\\"type\\\": \\\"string\\\", \\\"description\\\": \\\"The city and state, e.g. San Francisco, CA\\\" }, \\\"format\\\": { \\\"type\\\": \\\"string\\\", \\\"enum\\\": [\\\"celsius\\\", \\\"fahrenheit\\\", \\\"kelvin\\\"], \\\"description\\\": \\\"The temperature unit to use. Infer this from the users location.\\\" } }, \\\"required\\\": [\\\"location\\\", \\\"format\\\"] }\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"82d8c062-5280-4378-afad-8057ebea037a","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"1d60b8ea-f512-4ce0-92ad-0a086a4717a2","name":"List voices - default","request":{"urlPathTemplate":"/v0/tts/voices","method":"GET"},"response":{"status":200,"body":"{\n \"page_number\": 0,\n \"page_size\": 10,\n \"total_pages\": 1,\n \"voices_page\": [\n {\n \"id\": \"c42352c0-4566-455d-b180-0f654b65b525\",\n \"name\": \"David Hume\",\n \"provider\": \"CUSTOM_VOICE\"\n },\n {\n \"id\": \"d87352b0-26a3-4b11-081b-d157a5674d19\",\n \"name\": \"Goliath Hume\",\n \"provider\": \"CUSTOM_VOICE\"\n }\n ]\n}","headers":{"Content-Type":"application/json"}},"uuid":"1d60b8ea-f512-4ce0-92ad-0a086a4717a2","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"2c434990-d686-4fec-940d-7b86327bf9d5","name":"Create voice - default","request":{"urlPathTemplate":"/v0/tts/voices","method":"POST"},"response":{"status":200,"body":"{\n \"id\": \"c42352c0-4566-455d-b180-0f654b65b525\",\n \"name\": \"David Hume\",\n \"provider\": 
\"CUSTOM_VOICE\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"2c434990-d686-4fec-940d-7b86327bf9d5","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"69163bef-50c2-4c89-b4f9-70f5a04bfec8","name":"Delete voice - default","request":{"urlPathTemplate":"/v0/tts/voices","method":"DELETE"},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"69163bef-50c2-4c89-b4f9-70f5a04bfec8","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"1a9089ce-a462-43bb-afa5-ed028dd296e5","name":"Text-to-Speech (Json) - default","request":{"urlPathTemplate":"/v0/tts","method":"POST"},"response":{"status":200,"body":"{\n \"generations\": [\n {\n \"audio\": \"//PExAA0DDYRvkpNfhv3JI5JZ...etc.\",\n \"duration\": 7.44225,\n \"encoding\": {\n \"format\": \"mp3\",\n \"sample_rate\": 48000\n },\n \"file_size\": 120192,\n \"generation_id\": \"795c949a-1510-4a80-9646-7d0863b023ab\",\n \"snippets\": [\n [\n {\n \"audio\": \"//PExAA0DDYRvkpNfhv3JI5JZ...etc.\",\n \"generation_id\": \"795c949a-1510-4a80-9646-7d0863b023ab\",\n \"id\": \"37b1b1b1-1b1b-1b1b-1b1b-1b1b1b1b1b1b\",\n \"text\": \"Beauty is no quality in things themselves: It exists merely in the mind which contemplates them.\",\n \"utterance_index\": 0,\n \"timestamps\": []\n }\n ]\n ]\n }\n ],\n \"request_id\": \"66e01f90-4501-4aa0-bbaf-74f45dc15aa725906\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"1a9089ce-a462-43bb-afa5-ed028dd296e5","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"449bb149-6027-4735-a265-0a0a5bc0d0ef","name":"Text-to-Speech (File) - 
default","request":{"urlPathTemplate":"/v0/tts/file","method":"POST"},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"449bb149-6027-4735-a265-0a0a5bc0d0ef","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"fd6bbe04-a38a-4d6e-bc3a-f8cf25725dbe","name":"Text-to-Speech (Streamed File) - default","request":{"urlPathTemplate":"/v0/tts/stream/file","method":"POST"},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"fd6bbe04-a38a-4d6e-bc3a-f8cf25725dbe","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"8e8e8262-38fd-4f81-836b-c9d7ee07cd84","name":"Text-to-Speech (Streamed JSON) - default","request":{"urlPathTemplate":"/v0/tts/stream/json","method":"POST"},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"8e8e8262-38fd-4f81-836b-c9d7ee07cd84","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"f75e0a57-c1d9-44df-80c7-c8485a7a76d6","name":"Voice Conversion (Streamed JSON) - default","request":{"urlPathTemplate":"/v0/tts/voice_conversion/json","method":"POST"},"response":{"status":200,"body":"\"\"","headers":{"Content-Type":"application/json"}},"uuid":"f75e0a57-c1d9-44df-80c7-c8485a7a76d6","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"ac2d855e-1240-4aa0-88e2-61efb0c0026a","name":"List jobs - default","request":{"urlPathTemplate":"/v0/batch/jobs","method":"GET"},"response":{"status":200,"body":"[\n {\n \"job_id\": \"job_id\",\n \"request\": {\n \"callback_url\": null,\n \"files\": [\n {\n \"filename\": \"filename\",\n \"md5sum\": \"md5sum\",\n \"content_type\": \"content_type\"\n }\n ],\n \"models\": {\n \"burst\": {},\n \"face\": {\n \"descriptions\": 
null,\n \"facs\": null,\n \"fps_pred\": 3,\n \"identify_faces\": false,\n \"min_face_size\": 60,\n \"prob_threshold\": 0.99,\n \"save_faces\": false\n },\n \"facemesh\": {},\n \"language\": {\n \"granularity\": \"word\",\n \"identify_speakers\": false,\n \"sentiment\": null,\n \"toxicity\": null\n },\n \"ner\": {\n \"identify_speakers\": false\n },\n \"prosody\": {\n \"granularity\": \"utterance\",\n \"identify_speakers\": false,\n \"window\": null\n }\n },\n \"notify\": true,\n \"text\": [],\n \"urls\": [\n \"https://hume-tutorials.s3.amazonaws.com/faces.zip\"\n ]\n },\n \"state\": {\n \"created_timestamp_ms\": 1712587158717,\n \"ended_timestamp_ms\": 1712587159274,\n \"num_errors\": 0,\n \"num_predictions\": 10,\n \"started_timestamp_ms\": 1712587158800,\n \"status\": \"COMPLETED\"\n },\n \"type\": \"INFERENCE\"\n }\n]","headers":{"Content-Type":"application/json"}},"uuid":"ac2d855e-1240-4aa0-88e2-61efb0c0026a","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}},"postServeActions":[]},{"id":"53d86d3c-c150-433f-ab12-ef8e8e9210eb","name":"Start inference job - default","request":{"urlPathTemplate":"/v0/batch/jobs","method":"POST"},"response":{"status":200,"body":"{\n \"job_id\": \"job_id\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"53d86d3c-c150-433f-ab12-ef8e8e9210eb","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"b5838b5a-8fe8-4d6b-9439-5e77b6a8b017","name":"Get job details - default","request":{"urlPathTemplate":"/v0/batch/jobs/{id}","method":"GET","pathParameters":{"id":{"equalTo":"job_id"}}},"response":{"status":200,"body":"{\n \"type\": \"INFERENCE\",\n \"job_id\": \"job_id\",\n \"request\": {\n \"callback_url\": null,\n \"files\": [],\n \"models\": {\n \"burst\": {},\n \"face\": {\n \"descriptions\": null,\n \"facs\": null,\n \"fps_pred\": 3,\n \"identify_faces\": false,\n \"min_face_size\": 60,\n 
\"prob_threshold\": 0.99,\n \"save_faces\": false\n },\n \"facemesh\": {},\n \"language\": {\n \"granularity\": \"word\",\n \"identify_speakers\": false,\n \"sentiment\": null,\n \"toxicity\": null\n },\n \"ner\": {\n \"identify_speakers\": false\n },\n \"prosody\": {\n \"granularity\": \"utterance\",\n \"identify_speakers\": false,\n \"window\": null\n }\n },\n \"notify\": true,\n \"text\": [],\n \"urls\": [\n \"https://hume-tutorials.s3.amazonaws.com/faces.zip\"\n ]\n },\n \"state\": {\n \"created_timestamp_ms\": 1712590457884,\n \"ended_timestamp_ms\": 1712590462252,\n \"num_errors\": 0,\n \"num_predictions\": 10,\n \"started_timestamp_ms\": 1712590457995,\n \"status\": \"COMPLETED\"\n }\n}","headers":{"Content-Type":"application/json"}},"uuid":"b5838b5a-8fe8-4d6b-9439-5e77b6a8b017","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"27496704-1278-4ec0-804b-5b06a9ddad44","name":"Get job predictions - default","request":{"urlPathTemplate":"/v0/batch/jobs/{id}/predictions","method":"GET","pathParameters":{"id":{"equalTo":"job_id"}}},"response":{"status":200,"body":"[\n {\n \"source\": {\n \"type\": \"url\",\n \"url\": \"https://hume-tutorials.s3.amazonaws.com/faces.zip\"\n },\n \"results\": {\n \"predictions\": [\n {\n \"file\": \"faces/100.jpg\",\n \"models\": {\n \"face\": {\n \"metadata\": null,\n \"grouped_predictions\": [\n {\n \"id\": \"unknown\",\n \"predictions\": [\n {\n \"frame\": 0,\n \"time\": 0,\n \"prob\": 0.9994111061096191,\n \"box\": {\n \"x\": 1187.885986328125,\n \"y\": 1397.697509765625,\n \"w\": 1401.668701171875,\n \"h\": 1961.424560546875\n },\n \"emotions\": [\n {\n \"name\": \"Admiration\",\n \"score\": 0.10722749680280685\n },\n {\n \"name\": \"Adoration\",\n \"score\": 0.06395940482616425\n },\n {\n \"name\": \"Aesthetic Appreciation\",\n \"score\": 0.05811462551355362\n },\n {\n \"name\": \"Amusement\",\n \"score\": 0.14187128841876984\n },\n {\n \"name\": 
\"Anger\",\n \"score\": 0.02804684266448021\n },\n {\n \"name\": \"Anxiety\",\n \"score\": 0.2713485360145569\n },\n {\n \"name\": \"Awe\",\n \"score\": 0.33812594413757324\n },\n {\n \"name\": \"Awkwardness\",\n \"score\": 0.1745193600654602\n },\n {\n \"name\": \"Boredom\",\n \"score\": 0.23600080609321594\n },\n {\n \"name\": \"Calmness\",\n \"score\": 0.18988418579101562\n },\n {\n \"name\": \"Concentration\",\n \"score\": 0.44288986921310425\n },\n {\n \"name\": \"Confusion\",\n \"score\": 0.39346569776535034\n },\n {\n \"name\": \"Contemplation\",\n \"score\": 0.31002455949783325\n },\n {\n \"name\": \"Contempt\",\n \"score\": 0.048870109021663666\n },\n {\n \"name\": \"Contentment\",\n \"score\": 0.0579497292637825\n },\n {\n \"name\": \"Craving\",\n \"score\": 0.06544201076030731\n },\n {\n \"name\": \"Desire\",\n \"score\": 0.05526508390903473\n },\n {\n \"name\": \"Determination\",\n \"score\": 0.08590991795063019\n },\n {\n \"name\": \"Disappointment\",\n \"score\": 0.19508258998394012\n },\n {\n \"name\": \"Disgust\",\n \"score\": 0.031529419124126434\n },\n {\n \"name\": \"Distress\",\n \"score\": 0.23210826516151428\n },\n {\n \"name\": \"Doubt\",\n \"score\": 0.3284550905227661\n },\n {\n \"name\": \"Ecstasy\",\n \"score\": 0.040716782212257385\n },\n {\n \"name\": \"Embarrassment\",\n \"score\": 0.1467227339744568\n },\n {\n \"name\": \"Empathic Pain\",\n \"score\": 0.07633581757545471\n },\n {\n \"name\": \"Entrancement\",\n \"score\": 0.16245244443416595\n },\n {\n \"name\": \"Envy\",\n \"score\": 0.03267110139131546\n },\n {\n \"name\": \"Excitement\",\n \"score\": 0.10656816512346268\n },\n {\n \"name\": \"Fear\",\n \"score\": 0.3115977346897125\n },\n {\n \"name\": \"Guilt\",\n \"score\": 0.11615975946187973\n },\n {\n \"name\": \"Horror\",\n \"score\": 0.19795553386211395\n },\n {\n \"name\": \"Interest\",\n \"score\": 0.3136432468891144\n },\n {\n \"name\": \"Joy\",\n \"score\": 0.06285581737756729\n },\n {\n \"name\": \"Love\",\n \"score\": 
0.06339752674102783\n },\n {\n \"name\": \"Nostalgia\",\n \"score\": 0.05866732448339462\n },\n {\n \"name\": \"Pain\",\n \"score\": 0.07684041559696198\n },\n {\n \"name\": \"Pride\",\n \"score\": 0.026822954416275024\n },\n {\n \"name\": \"Realization\",\n \"score\": 0.30000734329223633\n },\n {\n \"name\": \"Relief\",\n \"score\": 0.04414166510105133\n },\n {\n \"name\": \"Romance\",\n \"score\": 0.042728863656520844\n },\n {\n \"name\": \"Sadness\",\n \"score\": 0.14773206412792206\n },\n {\n \"name\": \"Satisfaction\",\n \"score\": 0.05902980640530586\n },\n {\n \"name\": \"Shame\",\n \"score\": 0.08103451132774353\n },\n {\n \"name\": \"Surprise (negative)\",\n \"score\": 0.25518184900283813\n },\n {\n \"name\": \"Surprise (positive)\",\n \"score\": 0.28845661878585815\n },\n {\n \"name\": \"Sympathy\",\n \"score\": 0.062488824129104614\n },\n {\n \"name\": \"Tiredness\",\n \"score\": 0.1559651643037796\n },\n {\n \"name\": \"Triumph\",\n \"score\": 0.01955239288508892\n }\n ],\n \"facs\": null,\n \"descriptions\": null\n }\n ]\n }\n ]\n }\n }\n }\n ],\n \"errors\": []\n }\n }\n]","headers":{"Content-Type":"application/json"}},"uuid":"27496704-1278-4ec0-804b-5b06a9ddad44","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"12d3146e-fa86-4658-bfca-755d6e939757","name":"Start inference job from local file - default","request":{"urlPathTemplate":"/v0/batch/jobs","method":"POST"},"response":{"status":200,"body":"{\n \"job_id\": \"job_id\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"12d3146e-fa86-4658-bfca-755d6e939757","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}}],"meta":{"total":48}}
\ No newline at end of file