From 93a1efe801760af04abe1519e0260163666ba0ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Josef=20V=C3=A1lek?= Date: Thu, 5 Feb 2026 15:34:05 +0100 Subject: [PATCH 1/7] feat(docs): Multiple datasets --- .../dataset_schema/multiple_datasets.md | 86 +++++++++++++++++++ .../environment_variables.md | 1 + 2 files changed, 87 insertions(+) create mode 100644 sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md diff --git a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md new file mode 100644 index 0000000000..ed2c75e37d --- /dev/null +++ b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md @@ -0,0 +1,86 @@ +--- +title: Multiple datasets +description: Learn how to use multiple datasets within your Actors to organize and store different types of data separately. +slug: /actors/development/actor-definition/dataset-schema/multiple-datasets +--- + +**Specify datasets with different structure.** + +--- + +Some Actors produce data with different structure. In some cases, it's convenient to store the data in separate datasets, instead of pushing all data to the default one. Multiple datasets allow you to specify those datasets upfront and enforce validation rules. + +New datasets are created when the run starts, and follow its data retention. + + +## Defining multiple datasets + +The multiple datasets may be defined in Actor schema using `datasets` object: + +```json title=".actor/actor.json" +{ + "actorSpecification": 1, + "name": "this-is-book-library-scraper", + "title": "Book Library scraper", + "version": "1.0.0", + "storages": { + "datasets": { + "products": "./products_dataset_schema.json", + "categories": "./categories_dataset_schema.json" + } + } +} +``` +Schemas of individual datasets can be provided as a file reference or inlined.
+ +The keys of the `datasets` objects are **aliases**, which can be used to refer to the specific datasets. In the example above, we have two datasets, aliased as `products` and `categories`. + +:::info + +Alias and **name** are not the same thing. Named datasets have specific behavior in Apify platform (e.g., the automatic data retention policy does not apply to them). Aliased datasets follow the data retention of their respective run. Aliases stay local to the run they belong to. + +::: + +The `datasets` object has to contain at least one dataset. The first one specified is treated as the default dataset for all purposes where a default dataset is needed. For this reason, it's automatically aliased also as `default`. In the example above, the `products` dataset is going to be used as the default one. + +The `datasets` and `dataset` objects are mutually exclusive; the schema can only contain one. + +## Accessing the datasets in Actor code + +Mapping of aliases to the IDs is passed to the Actor in JSON encoded `ACTOR_STORAGE_IDS` environment variable.
+ +```javascript +const storageIds = JSON.parse(process.env.ACTOR_STORAGE_IDS) +const productsDataset = await Actor.openDataset(storageIds.datasets.products); +``` + +Incoming SDK support: + +```javascript +const productsDataset = await Actor.openDataset({alias: 'products'}); +``` + +## Showing data to users + +Actors with output schema can refer to the datasets through variables using aliases: + +```json +{ + "actorOutputSchemaVersion": 1, + "title": "Output schema", + "properties": { + "products": { + "type": "string", + "title": "Products", + "template": "{{storages.datasets.products.apiUrl}}/items" + }, + "categories": { + "type": "string", + "title": "Categories", + "template": "{{storages.datasets.categories.apiUrl}}/items" + } + } +} +``` + +- TODO: Rely on default display \ No newline at end of file diff --git a/sources/platform/actors/development/programming_interface/environment_variables.md b/sources/platform/actors/development/programming_interface/environment_variables.md index b295748a64..efc1a3b727 100644 --- a/sources/platform/actors/development/programming_interface/environment_variables.md +++ b/sources/platform/actors/development/programming_interface/environment_variables.md @@ -44,6 +44,7 @@ Here's a table of key system environment variables: | `ACTOR_BUILD_TAGS` | A comma-separated list of tags of the Actor build used in the run. Note that this environment variable is assigned at the time of start of the Actor and doesn't change over time, even if the assigned build tags change. | | `ACTOR_TASK_ID` | ID of the Actor task. Empty if Actor is run outside of any task, e.g. directly using the API. | | `ACTOR_EVENTS_WEBSOCKET_URL` | Websocket URL where Actor may listen for [events](/platform/actors/development/programming-interface/system-events) from Actor platform. 
| +| `ACTOR_STORAGE_IDS` | JSON encoded unique identifiers of storages associated with the current Actor run | | `ACTOR_DEFAULT_DATASET_ID` | Unique identifier for the default dataset associated with the current Actor run. | | `ACTOR_DEFAULT_KEY_VALUE_STORE_ID` | Unique identifier for the default key-value store associated with the current Actor run. | | `ACTOR_DEFAULT_REQUEST_QUEUE_ID` | Unique identifier for the default request queue associated with the current Actor run. | From 29d954f3f207f1237c893ebce2f9e22b5a23bc3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Josef=20V=C3=A1lek?= Date: Thu, 5 Feb 2026 15:36:21 +0100 Subject: [PATCH 2/7] Added todos --- .../actor_definition/dataset_schema/multiple_datasets.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md index ed2c75e37d..a3d8971eec 100644 --- a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md +++ b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md @@ -83,4 +83,5 @@ Actors with output schema can refer to the datasets through variables using alia } ``` -- TODO: Rely on default display \ No newline at end of file +- TODO: Rely on default display +- TODO: Behavior in billing From 9bd61b87a5f82839e5c49e571c3694d4ed9fbdf2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Josef=20V=C3=A1lek?= Date: Mon, 9 Feb 2026 10:36:53 +0100 Subject: [PATCH 3/7] Update variable name --- .../actor_definition/dataset_schema/multiple_datasets.md | 4 ++-- .../programming_interface/environment_variables.md | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md index 
a3d8971eec..6add7944e1 100644 --- a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md +++ b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md @@ -47,10 +47,10 @@ The `datasets` and `dataset` objects are mutually exclusive, the schema can only ## Accessing the datasets in Actor code -Mapping of aliases to the IDs is passed to the Actor in JSON encoded `ACTOR_STORAGE_IDS` environment variable. +Mapping of aliases to the IDs is passed to the Actor in JSON encoded `ACTOR_STORAGES_JSON` environment variable. ```javascript -const storageIds = JSON.parse(process.env.ACTOR_STORAGE_IDS) +const storageIds = JSON.parse(process.env.ACTOR_STORAGES_JSON) const productsDataset = await Actor.openDataset(storageIds.datasets.products); ``` diff --git a/sources/platform/actors/development/programming_interface/environment_variables.md b/sources/platform/actors/development/programming_interface/environment_variables.md index efc1a3b727..92df9b7955 100644 --- a/sources/platform/actors/development/programming_interface/environment_variables.md +++ b/sources/platform/actors/development/programming_interface/environment_variables.md @@ -44,7 +44,7 @@ Here's a table of key system environment variables: | `ACTOR_BUILD_TAGS` | A comma-separated list of tags of the Actor build used in the run. Note that this environment variable is assigned at the time of start of the Actor and doesn't change over time, even if the assigned build tags change. | | `ACTOR_TASK_ID` | ID of the Actor task. Empty if Actor is run outside of any task, e.g. directly using the API. | | `ACTOR_EVENTS_WEBSOCKET_URL` | Websocket URL where Actor may listen for [events](/platform/actors/development/programming-interface/system-events) from Actor platform. 
| -| `ACTOR_STORAGE_IDS` | JSON encoded unique identifiers of storages associated with the current Actor run | +| `ACTOR_STORAGES_JSON` | JSON encoded unique identifiers of storages associated with the current Actor run | | `ACTOR_DEFAULT_DATASET_ID` | Unique identifier for the default dataset associated with the current Actor run. | | `ACTOR_DEFAULT_KEY_VALUE_STORE_ID` | Unique identifier for the default key-value store associated with the current Actor run. | | `ACTOR_DEFAULT_REQUEST_QUEUE_ID` | Unique identifier for the default request queue associated with the current Actor run. | From dbf37acf66bf3c0439e3b064923fc7395ae73dbe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Josef=20V=C3=A1lek?= Date: Thu, 12 Feb 2026 11:39:32 +0100 Subject: [PATCH 4/7] Address changes --- .../dataset_schema/multiple_datasets.md | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md index 6add7944e1..b20cc7bd56 100644 --- a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md +++ b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md @@ -25,7 +25,7 @@ The multiple datasets may defined in Actor schema using `datasets` object: "version": "1.0.0", "storages": { "datasets": { - "products": "./products_dataset_schema.json", + "default": "./products_dataset_schema.json", "categories": "./categories_dataset_schema.json" } } @@ -33,7 +33,7 @@ The multiple datasets may defined in Actor schema using `datasets` object: ``` Schemas of individual datasets can be provided as a file reference or inlined. -The keys of the `datasets` objects are **aliases**, which can be used to refer to the specific datasets. In the example above, we have two datasets, aliased as `products` and `categories`. 
+The keys of the `datasets` objects are **aliases**, which can be used to refer to the specific datasets. In the example above, we have two datasets, aliased as `default` and `categories`. :::info @@ -41,7 +41,7 @@ Alias and **name** are not the same thing. Named datasets have specific behavior ::: -The `datasets` object has to contain at least one dataset. The first one specified is treated as the default dataset for all purposes where a default dataset is needed. For this reason, it's automatically aliased also as `default`. In the example above, the `products` dataset is going to be used as the default one. +The `datasets` object has to contain the `default` alias. The `datasets` and `dataset` objects are mutually exclusive, the schema can only contain one. @@ -51,13 +51,17 @@ Mapping of aliases to the IDs is passed to the Actor in JSON encoded `ACTOR_STOR ```javascript const storageIds = JSON.parse(process.env.ACTOR_STORAGES_JSON) -const productsDataset = await Actor.openDataset(storageIds.datasets.products); +const defaultDataset = await Actor.openDataset(); +// For the default dataset, it's also possible to use the following syntax: +// const defaultDataset = await Actor.openDataset(storageIds.datasets.default); +const categoriesDataset = await Actor.openDataset(storageIds.datasets.categories); + ``` Incoming SDK support: ```javascript -const productsDataset = await Actor.openDataset({alias: 'products'}); +const categoriesDataset = await Actor.openDataset({alias: 'categories'}); ``` ## Showing data to users @@ -72,7 +76,7 @@ Actors with output schema can refer to the datasets through variables using alia "products": { "type": "string", "title": "Products", - "template": "{{storages.datasets.products.apiUrl}}/items" + "template": "{{storages.datasets.default.apiUrl}}/items" }, "categories": { "type": "string", From da0b5f0fa1dcb84ef0d93cabcd775c4804a005bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Josef=20V=C3=A1lek?= Date: Tue, 24 Feb 2026 11:59:57 +0100 
Subject: [PATCH 5/7] Comments --- .../dataset_schema/multiple_datasets.md | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md index b20cc7bd56..ba79c9185b 100644 --- a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md +++ b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md @@ -58,12 +58,20 @@ const categoriesDataset = await Actor.openDataset(storageIds.datasets.categories ``` -Incoming SDK support: +```sh +echo $ACTOR_STORAGES_JSON | jq '.datasets.categories' +``` + +Support for JS and Python SDKs is incoming, the expected syntax is following: ```javascript const categoriesDataset = await Actor.openDataset({alias: 'categories'}); ``` +```python +categories_dataset = await Actor.open_dataset(alias='categories') +``` + ## Showing data to users Actors with output schema can refer to the datasets through variables using aliases: @@ -87,5 +95,8 @@ Actors with output schema can refer to the datasets through variables using alia } ``` +## Billing implications + +The `apify-default-dataset-item` synthetic event is only charged for items in dataset aliased as `default`. Charging for items in other datasets needs to be implemented in the Actor code. 
+ - TODO: Rely on default display -- TODO: Behavior in billing From 97effaca86936f5834f5db038ab4c6400d3e3bb7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Josef=20V=C3=A1lek?= Date: Tue, 3 Mar 2026 10:50:11 +0100 Subject: [PATCH 6/7] Mention UI --- .../dataset_schema/multiple_datasets.md | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md index ba79c9185b..80f5abf4a4 100644 --- a/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md +++ b/sources/platform/actors/development/actor_definition/dataset_schema/multiple_datasets.md @@ -20,8 +20,8 @@ The multiple datasets may defined in Actor schema using `datasets` object: ```json title=".actor/actor.json" { "actorSpecification": 1, - "name": "this-is-book-library-scraper", - "title": "Book Library scraper", + "name": "this-is-e-commerce-scraper", + "title": "E-Commerce Scraper", "version": "1.0.0", "storages": { "datasets": { @@ -74,6 +74,14 @@ categories_dataset = await Actor.open_dataset(alias='categories') ## Showing data to users +### Run Storages tab + +The Storage tab of the Actor run view displays all the datasets defined by the Actor, as well as datasets that were used by the run (up to some limit). + +This makes the data accessible, but not very user-friendly. To make the datasets more accessible to users, use the output schema. + +### Output schema + Actors with output schema can refer to the datasets through variables using aliases: ```json @@ -98,5 +106,3 @@ Actors with output schema can refer to the datasets through variables using alia } ``` ## Billing implications The `apify-default-dataset-item` synthetic event is only charged for items in dataset aliased as `default`. Charging for items in other datasets needs to be implemented in the Actor code.
- -- TODO: Rely on default display From 652317c475ec934b5ecf713bbc38e6f3cc3dde3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Josef=20V=C3=A1lek?= Date: Tue, 3 Mar 2026 11:25:25 +0100 Subject: [PATCH 7/7] API docs --- .../components/schemas/actor-runs/Run.yaml | 34 +++++++++++++++++++ .../paths/actor-runs/actor-runs@{runId}.yaml | 14 ++++++++ .../actor-runs/actor-runs@{runId}@abort.yaml | 7 ++++ .../actor-runs@{runId}@metamorph.yaml | 7 ++++ .../actor-runs/actor-runs@{runId}@reboot.yaml | 7 ++++ .../actor-runs@{runId}@resurrect.yaml | 7 ++++ .../actor-tasks@{actorTaskId}@runs.yaml | 7 ++++ .../actor-tasks@{actorTaskId}@runs@last.yaml | 7 ++++ .../paths/actors/acts@{actorId}@runs.yaml | 7 ++++ .../actors/acts@{actorId}@runs@last.yaml | 7 ++++ .../actors/acts@{actorId}@runs@{runId}.yaml | 7 ++++ .../acts@{actorId}@runs@{runId}@abort.yaml | 7 ++++ ...acts@{actorId}@runs@{runId}@metamorph.yaml | 7 ++++ ...acts@{actorId}@runs@{runId}@resurrect.yaml | 7 ++++ 14 files changed, 132 insertions(+) diff --git a/apify-api/openapi/components/schemas/actor-runs/Run.yaml b/apify-api/openapi/components/schemas/actor-runs/Run.yaml index e950d4ff9d..9ccc75b47d 100644 --- a/apify-api/openapi/components/schemas/actor-runs/Run.yaml +++ b/apify-api/openapi/components/schemas/actor-runs/Run.yaml @@ -97,6 +97,40 @@ properties: type: string examples: [FL35cSF7jrxr3BY39] description: ID of the default request queue for this run. + storageIds: + type: object + description: Storage IDs associated with this run, organized by storage type. + properties: + datasets: + type: object + description: Dataset storage IDs. + properties: + default: + type: string + examples: [wmKPijuyDnPZAPRMk] + description: ID of the default dataset for this run. + additionalProperties: + type: string + keyValueStores: + type: object + description: Key-value store storage IDs. + properties: + default: + type: string + examples: [eJNzqsbPiopwJcgGQ] + description: ID of the default key-value store for this run. 
+ additionalProperties: + type: string + requestQueues: + type: object + description: Request queue storage IDs. + properties: + default: + type: string + examples: [FL35cSF7jrxr3BY39] + description: ID of the default request queue for this run. + additionalProperties: + type: string buildNumber: type: string examples: [0.0.36] diff --git a/apify-api/openapi/paths/actor-runs/actor-runs@{runId}.yaml b/apify-api/openapi/paths/actor-runs/actor-runs@{runId}.yaml index a6bd87858f..fce9ce2eb5 100644 --- a/apify-api/openapi/paths/actor-runs/actor-runs@{runId}.yaml +++ b/apify-api/openapi/paths/actor-runs/actor-runs@{runId}.yaml @@ -148,6 +148,13 @@ get: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 buildNumber: 0.0.36 containerUrl: "https://g8kd8kbc5ge8.runs.apify.net" isContainerServerReady: true @@ -292,6 +299,13 @@ put: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 buildNumber: 0.0.36 containerUrl: "https://g8kd8kbc5ge8.runs.apify.net" isContainerServerReady: true diff --git a/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@abort.yaml b/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@abort.yaml index 0419c9fa67..e73ab8bc9d 100644 --- a/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@abort.yaml +++ b/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@abort.yaml @@ -80,6 +80,13 @@ post: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + 
requestQueues: + default: FL35cSF7jrxr3BY39 isContainerServerReady: false gitBranchName: master usage: diff --git a/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@metamorph.yaml b/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@metamorph.yaml index 61de7b46d6..36339a35f6 100644 --- a/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@metamorph.yaml +++ b/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@metamorph.yaml @@ -101,6 +101,13 @@ post: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 metamorphs: - createdAt: "2019-11-30T07:39:24.202Z" actorId: nspoEjklmnsF2oosD diff --git a/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@reboot.yaml b/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@reboot.yaml index e4e6adb00f..f2b23d4735 100644 --- a/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@reboot.yaml +++ b/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@reboot.yaml @@ -71,6 +71,13 @@ post: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 buildNumber: 0.0.36 containerUrl: "https://g8kd8kbc5ge8.runs.apify.net" isContainerServerReady: true diff --git a/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@resurrect.yaml b/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@resurrect.yaml index c639fe92d8..c3ce09560a 100644 --- a/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@resurrect.yaml +++ b/apify-api/openapi/paths/actor-runs/actor-runs@{runId}@resurrect.yaml @@ -100,6 +100,13 @@ post: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk 
defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 buildNumber: 0.0.36 containerUrl: "https://g8kd8kbc5ge8.runs.apify.net" isContainerServerReady: true diff --git a/apify-api/openapi/paths/actor-tasks/actor-tasks@{actorTaskId}@runs.yaml b/apify-api/openapi/paths/actor-tasks/actor-tasks@{actorTaskId}@runs.yaml index 684bec8375..60ac2ffef1 100644 --- a/apify-api/openapi/paths/actor-tasks/actor-tasks@{actorTaskId}@runs.yaml +++ b/apify-api/openapi/paths/actor-tasks/actor-tasks@{actorTaskId}@runs.yaml @@ -316,6 +316,13 @@ post: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 buildNumber: 0.2.2 containerUrl: "https://nwfcc4btrgqt.runs.apify.com" isContainerServerReady: false diff --git a/apify-api/openapi/paths/actor-tasks/actor-tasks@{actorTaskId}@runs@last.yaml b/apify-api/openapi/paths/actor-tasks/actor-tasks@{actorTaskId}@runs@last.yaml index b85a53c2fa..6beea31a7e 100644 --- a/apify-api/openapi/paths/actor-tasks/actor-tasks@{actorTaskId}@runs@last.yaml +++ b/apify-api/openapi/paths/actor-tasks/actor-tasks@{actorTaskId}@runs@last.yaml @@ -123,6 +123,13 @@ get: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 buildNumber: 0.0.36 containerUrl: "https://g8kd8kbc5ge8.runs.apify.net" isContainerServerReady: true diff --git a/apify-api/openapi/paths/actors/acts@{actorId}@runs.yaml b/apify-api/openapi/paths/actors/acts@{actorId}@runs.yaml index 244936cd64..476f08c45c 100644 --- 
a/apify-api/openapi/paths/actors/acts@{actorId}@runs.yaml +++ b/apify-api/openapi/paths/actors/acts@{actorId}@runs.yaml @@ -333,6 +333,13 @@ post: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 buildNumber: 0.0.36 containerUrl: "https://g8kd8kbc5ge8.runs.apify.net" isContainerServerReady: true diff --git a/apify-api/openapi/paths/actors/acts@{actorId}@runs@last.yaml b/apify-api/openapi/paths/actors/acts@{actorId}@runs@last.yaml index 4e558aeb17..32ac2f4a0b 100644 --- a/apify-api/openapi/paths/actors/acts@{actorId}@runs@last.yaml +++ b/apify-api/openapi/paths/actors/acts@{actorId}@runs@last.yaml @@ -125,6 +125,13 @@ get: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 buildNumber: 0.0.36 containerUrl: "https://g8kd8kbc5ge8.runs.apify.net" isContainerServerReady: true diff --git a/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}.yaml b/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}.yaml index b1718338c1..b9d53be04c 100644 --- a/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}.yaml +++ b/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}.yaml @@ -97,6 +97,13 @@ get: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 buildNumber: 0.0.36 containerUrl: "https://g8kd8kbc5ge8.runs.apify.net" isContainerServerReady: true diff --git 
a/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@abort.yaml b/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@abort.yaml index eecfc7783a..676580126d 100644 --- a/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@abort.yaml +++ b/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@abort.yaml @@ -89,6 +89,13 @@ post: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 isContainerServerReady: false gitBranchName: master usage: diff --git a/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@metamorph.yaml b/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@metamorph.yaml index b0bf9cd7ac..7f9753f444 100644 --- a/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@metamorph.yaml +++ b/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@metamorph.yaml @@ -113,6 +113,13 @@ post: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 metamorphs: - createdAt: "2019-11-30T07:39:24.202Z" actorId: nspoEjklmnsF2oosD diff --git a/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@resurrect.yaml b/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@resurrect.yaml index b610d759e2..4cea0cb00e 100644 --- a/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@resurrect.yaml +++ b/apify-api/openapi/paths/actors/acts@{actorId}@runs@{runId}@resurrect.yaml @@ -128,6 +128,13 @@ post: defaultKeyValueStoreId: eJNzqsbPiopwJcgGQ defaultDatasetId: wmKPijuyDnPZAPRMk defaultRequestQueueId: FL35cSF7jrxr3BY39 + storageIds: + datasets: + default: wmKPijuyDnPZAPRMk + keyValueStores: + 
default: eJNzqsbPiopwJcgGQ + requestQueues: + default: FL35cSF7jrxr3BY39 buildNumber: 0.0.36 containerUrl: "https://g8kd8kbc5ge8.runs.apify.net" isContainerServerReady: true