3737 S3Storage ,
3838)
3939from gooddata_pipelines .logger import LogObserver
40+ from gooddata_pipelines .utils .rate_limiter import RateLimiter
4041
4142
4243@dataclass
@@ -58,6 +59,11 @@ def __init__(self, host: str, token: str, config: BackupRestoreConfig):
5859
5960 self .loader = BackupInputProcessor (self ._api , self .config .api_page_size )
6061
62+ self ._api_rate_limiter = RateLimiter (
63+ calls_per_second = self .config .api_calls_per_second ,
64+ name = "GoodData_API_RateLimiter" ,
65+ )
66+
6167 @classmethod
6268 def create (
6369 cls : Type ["BackupManager" ],
@@ -93,11 +99,12 @@ def _get_storage(conf: BackupRestoreConfig) -> BackupStorage:
9399
def get_user_data_filters(self, ws_id: str) -> dict:
    """Return the user data filters of workspace *ws_id* as parsed JSON.

    The HTTP call is throttled by the shared API rate limiter.

    Raises:
        RuntimeError: when the API responds with a non-OK status
            (message carries the status code and response text).
    """
    # Hold the limiter only for the duration of the HTTP request itself.
    with self._api_rate_limiter:
        response: requests.Response = self._api.get_user_data_filters(ws_id)

    # Guard clause: surface API failures immediately.
    if not response.ok:
        raise RuntimeError(f"{response.status_code}: {response.text}")
    return response.json()
101108
102109 def _store_user_data_filters (
103110 self ,
@@ -142,14 +149,17 @@ def _write_to_yaml(path: str, source: Any) -> None:
142149
143150 def _get_automations_from_api (self , workspace_id : str ) -> Any :
144151 """Returns automations for the workspace as JSON."""
145- response : requests .Response = self ._api .get_automations (workspace_id )
146- if response .ok :
147- return response .json ()
148- else :
149- raise RuntimeError (
150- f"Failed to get automations for { workspace_id } . "
151- + f"{ response .status_code } : { response .text } "
152+ with self ._api_rate_limiter :
153+ response : requests .Response = self ._api .get_automations (
154+ workspace_id
152155 )
156+ if response .ok :
157+ return response .json ()
158+ else :
159+ raise RuntimeError (
160+ f"Failed to get automations for { workspace_id } . "
161+ + f"{ response .status_code } : { response .text } "
162+ )
153163
154164 def _store_automations (self , export_path : Path , workspace_id : str ) -> None :
155165 """Stores the automations in the specified export path."""
@@ -181,7 +191,8 @@ def store_declarative_filter_views(
181191 ) -> None :
182192 """Stores the filter views in the specified export path."""
183193 # Get the filter views YAML files from the API
184- self ._api .store_declarative_filter_views (workspace_id , export_path )
194+ with self ._api_rate_limiter :
195+ self ._api .store_declarative_filter_views (workspace_id , export_path )
185196
186197 # Move filter views to the subfolder containing the analytics model
187198 self ._move_folder (
@@ -229,7 +240,10 @@ def _get_workspace_export(
229240 # the SDK. That way we could save and package all the declarations
230241 # directly instead of reorganizing the folder structures. That should
231242 # be more transparent/readable and possibly safer for threading
232- self ._api .store_declarative_workspace (workspace_id , export_path )
243+ with self ._api_rate_limiter :
244+ self ._api .store_declarative_workspace (
245+ workspace_id , export_path
246+ )
233247 self .store_declarative_filter_views (export_path , workspace_id )
234248 self ._store_automations (export_path , workspace_id )
235249
0 commit comments