-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathpyproject.toml
More file actions
103 lines (92 loc) · 2.9 KB
/
pyproject.toml
File metadata and controls
103 lines (92 loc) · 2.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
[build-system]
# NOTE - versions of Hatchling more recent than 1.8.0 have a dependency on pathspec >= 0.10.1. However,
# Databricks Runtime versions only have pathspec==0.9.0 installed, and so attempting to `pip install ...`
# syncsparkpy will fail with an `AttributeError`. Pinning Hatchling to this version, which is the most
# recent version to only require pathspec >= 0.9.0, resolves that issue without asking the user to
# install pathspec >= 0.10.1 on their Databricks clusters. We should only ever upgrade this version once
# most/all Databricks Runtime LTS releases support pathspec >= 0.10.1, which may be a while...
# See here for DBR release notes - https://docs.databricks.com/release-notes/runtime/releases.html
build-backend = "hatchling.build"
requires = ["hatchling==1.8.0"]
[project]
authors = [{ name = "Sync Computing" }]
classifiers = [
    "Development Status :: 3 - Alpha",
    "Intended Audience :: Information Technology",
    "Intended Audience :: System Administrators",
    "License :: OSI Approved :: Apache Software License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    # Advertise 3.9 support: requires-python below, ruff's target-version, and
    # pyright's pythonVersion elsewhere in this file all target 3.9.
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python",
]
dependencies = [
    "azure-cli-core==2.50.0",
    "azure-identity==1.13.0",
    "azure-mgmt-compute==30.1.0",
    "azure-mgmt-resource==23.0.1",
    "boto3>=1.26.0,<2.0.0",
    "click~=8.1.0",
    "httpx~=0.23",
    "platformdirs",
    "pydantic~=1.10.0",
    "python-dateutil>=2.7,<3",
    "tenacity==8.2.2",
]
# description and version are supplied dynamically by the build backend
# (version is read from sync/__init__.py via [tool.hatch.version]).
dynamic = ["description", "version"]
name = "syncsparkpy"
readme = "README.md"
requires-python = ">=3.9"
[project.optional-dependencies]
# Development/test toolchain; install with `pip install .[dev]`.
dev = [
    "Sphinx==4.3.0",
    "deepdiff==6.3.0",
    "pre-commit==4.0.1",
    "pytest-asyncio==0.21.0",
    "pytest-env==0.8.1",
    "pytest==7.2.0",
    "respx==0.20.1",
    "ruff==0.7.2",
]

[project.scripts]
sync-cli = "sync.__main__:main"

[project.urls]
Home = "https://github.com/synccomputingcode/syncsparkpy"

# Tool configuration follows all [project.*] metadata (PEP 621 convention);
# previously this table was interleaved between the [project.*] sections.
[tool.hatch.version]
path = "sync/__init__.py"
[tool.pytest.ini_options]
# Dummy region/credentials injected into the test environment so tests never
# reach real AWS or the Sync API. Requires the pytest-env plugin, which is
# pinned in the dev extra above.
env = [
"AWS_DEFAULT_REGION=us-east-1",
"SYNC_API_KEY_ID=fake",
"SYNC_API_KEY_SECRET=fake",
]
filterwarnings = ["ignore::UserWarning"]
# Make the repository root importable so the `sync` package resolves in tests.
pythonpath = ["."]
[tool.ruff]
exclude = ["artifacts/*"]
line-length = 100
# Lowest Python version supported; keep in sync with requires-python (>=3.9).
target-version = "py39"
[tool.ruff.lint]
# E501 (line too long) is already enforced via line-length above.
ignore = ["E501"]
preview = true
select = ["ASYNC", "B", "C9", "E", "F", "I", "PLE", "RUF", "TID", "UP", "W"]
[tool.ruff.lint.flake8-bugbear]
# NOTE(review): the fastapi.* and app.api.deps.* entries below look copied
# from a FastAPI project; fastapi does not appear in this project's
# dependencies — confirm whether these can be dropped.
extend-immutable-calls = [
# Whitelisted default arguments
"Security",
"Tracker",
"app.api.deps.Tracker",
"fastapi.Depends",
"fastapi.Header",
"fastapi.Query",
"fastapi.Security",
]
[tool.ruff.lint.mccabe]
max-complexity = 20
[tool.pyright]
pythonPlatform = "All"
# Keep in sync with requires-python (>=3.9) and ruff's target-version.
pythonVersion = "3.9"
reportUnnecessaryTypeIgnoreComment = "error"
typeCheckingMode = "standard"
useLibraryCodeForTypes = false