We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent 7215918 · commit d93e26d (Copy full SHA for d93e26d)
13 files changed
.github/workflows/rw-collect-changes.yaml
@@ -48,3 +48,4 @@ jobs:
48
- 'gooddata-dbt/**'
49
- 'gooddata-flight-server/**'
50
- 'gooddata-flexconnect/**'
51
+ - 'gooddata-pipelines/**'
gooddata-pipelines/Makefile
@@ -0,0 +1,16 @@
1
+# (C) 2025 GoodData Corporation
2
+
3
+# Skip tests if running Python 3.9 from CI (gooddata-pipelines doesn't support py39)
4
+ifeq ($(TEST_ENVS),py39)
5
+.PHONY: test-ci
6
+test-ci:
7
+ @echo "Skipping tests for Python 3.9 - gooddata-pipelines doesn't support this version"
8
+ @exit 0
9
10
+.PHONY: test
11
+test:
12
13
14
+else
15
+include ../project_common.mk
16
+endif
gooddata-pipelines/gooddata_pipelines/api/gooddata_sdk.py
@@ -3,6 +3,7 @@
"""Interaction with GoodData Cloud via the Gooddata Python SDK."""
from pathlib import Path
+from typing import Callable
from gooddata_sdk.catalog.permission.declarative_model.permission import (
CatalogDeclarativeWorkspacePermissions,
@@ -23,8 +24,8 @@
23
24
from gooddata_pipelines.api.utils import raise_with_context
25
26
-def apply_to_all_methods(decorator):
27
- def decorate(cls):
+def apply_to_all_methods(decorator: Callable) -> Callable:
28
+ def decorate(cls: type) -> type:
29
for attr in cls.__dict__:
30
if callable(getattr(cls, attr)) and not attr.startswith("__"):
31
setattr(cls, attr, decorator(getattr(cls, attr)))
gooddata-pipelines/gooddata_pipelines/backup_and_restore/backup_manager.py
@@ -105,7 +105,7 @@ def store_user_data_filters(
105
user_data_filters: dict,
106
export_path: Path,
107
ws_id: str,
108
- ):
+ ) -> None:
109
"""Stores the user data filters in the specified export path."""
110
os.mkdir(
111
os.path.join(
@@ -136,7 +136,7 @@ def move_folder(source: Path, destination: Path) -> None:
136
shutil.move(source, destination)
137
138
@staticmethod
139
- def write_to_yaml(path: str, source):
+ def write_to_yaml(path: str, source: Any) -> None:
140
"""Writes the source to a YAML file."""
141
with open(path, "w") as outfile:
142
yaml.dump(source, outfile)
gooddata-pipelines/gooddata_pipelines/backup_and_restore/storage/base_storage.py
@@ -13,6 +13,6 @@ def __init__(self, conf: BackupRestoreConfig):
self.logger = LogObserver()
@abc.abstractmethod
- def export(self, folder, org_id):
+ def export(self, folder: str, org_id: str) -> None:
17
"""Exports the content of the folder to the storage."""
18
raise NotImplementedError
gooddata-pipelines/gooddata_pipelines/backup_and_restore/storage/local_storage.py
@@ -15,14 +15,18 @@ class LocalStorage(BackupStorage):
def __init__(self, conf: BackupRestoreConfig):
super().__init__(conf)
- def _export(self, folder, org_id, export_folder="local_backups") -> None:
+ def _export(
19
+ self, folder: str, org_id: str, export_folder: str = "local_backups"
20
21
"""Copies the content of the folder to local storage as backup."""
22
self.logger.info(f"Saving {org_id} to local storage")
shutil.copytree(
Path(folder), Path(Path.cwd(), export_folder), dirs_exist_ok=True
)
- def export(self, folder, org_id, export_folder="local_backups") -> None:
+ def export(
try:
32
self._export(folder, org_id, export_folder)
gooddata-pipelines/gooddata_pipelines/backup_and_restore/storage/s3_storage.py
@@ -52,7 +52,7 @@ def _verify_connection(self) -> None:
52
f"Failed to connect to S3 bucket {self._config.bucket}: {e}"
53
54
55
- def export(self, folder, org_id) -> None:
56
"""Uploads the content of the folder to S3 as backup."""
57
storage_path = f"{self._config.bucket}/{self._backup_path}"
58
self.logger.info(f"Uploading {org_id} to {storage_path}")
gooddata-pipelines/poetry.lock
gooddata-pipelines/pyproject.toml
@@ -8,13 +8,13 @@ license = { text = "BSD" }
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
- "pydantic (==2.11.3)",
- "requests (==2.32.3)",
- "types-requests (==2.32.0.20250602)",
- "gooddata-sdk (==1.43.0)",
+ "pydantic (>=2.11.3,<3.0.0)",
+ "requests (>=2.32.3,<3.0.0)",
+ "types-requests (>=2.32.0,<3.0.0)",
+ "gooddata-sdk (>=1.43.0,<2.0.0)",
"boto3 (>=1.39.3,<2.0.0)",
"boto3-stubs (>=1.39.3,<2.0.0)",
- "types-pyyaml (==6.0.12.20250326)",
+ "types-pyyaml (>=6.0.12.20250326,<7.0.0)",
]
[tool.mypy]
@@ -29,11 +29,11 @@ line-length = 80
[project.optional-dependencies]
dev = [
- "pytest==8.3.5",
33
- "pytest-mock==3.14.0",
34
- "ruff==0.11.2",
35
- "mypy>=1.16.0",
36
- "moto==5.1.6",
+ "pytest (>=8.3.5,<9.0.0)",
+ "pytest-mock (>=3.14.0,<4.0.0)",
+ "ruff (>=0.11.2,<0.12.0)",
+ "mypy (>=1.16.0,<2.0.0)",
+ "moto (>=5.1.6,<6.0.0)",
37
38
39
[build-system]
gooddata-pipelines/tests/backup_and_restore/test_backup.py
@@ -60,7 +60,7 @@ def backup_manager(mock_logger):
60
61
62
@pytest.fixture()
63
-def s3(aws_credentials):
+def s3():
64
with mock_aws():
65
yield boto3.resource("s3")
66
@@ -274,7 +274,7 @@ def test_local_storage_export(backup_manager):
274
shutil.rmtree("tests/data/local_export")
275
276
277
-def test_file_upload(backup_manager, s3, s3_bucket, mock_boto_session):
+def test_file_upload(backup_manager, s3, s3_bucket):
278
backup_manager.storage.export("tests/data/backup/test_exports", "services")
279
s3.Object(
280
S3_BUCKET,
0 commit comments