-
Notifications
You must be signed in to change notification settings - Fork 39
Expand file tree
/
Copy pathrequester.py
More file actions
189 lines (160 loc) · 5.68 KB
/
requester.py
File metadata and controls
189 lines (160 loc) · 5.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
import json
import logging
import mimetypes
import os
import time
from copy import copy
from typing import Dict, IO, List, Optional, Union
from urllib.parse import urljoin, quote
import requests
from crowdin_api import status
from crowdin_api.exceptions import (
APIException,
AuthenticationFailed,
CrowdinException,
MethodNotAllowed,
NotFound,
ParsingError,
PermissionDenied,
Throttled,
ValidationError,
)
from crowdin_api.parser import dumps, loads
logger = logging.getLogger("crowdin")
class APIRequester:
    """HTTP wrapper around a ``requests.Session``.

    Sends JSON (or raw file) requests to the Crowdin API, maps non-2xx
    responses to the exception types in ``exception_map``, and retries
    retryable failures with a fixed delay.
    """

    # HTTP status code -> exception type raised for that code.
    exception_map = {
        status.HTTP_400_BAD_REQUEST: ValidationError,
        status.HTTP_401_UNAUTHORIZED: AuthenticationFailed,
        status.HTTP_403_FORBIDDEN: PermissionDenied,
        status.HTTP_404_NOT_FOUND: NotFound,
        status.HTTP_405_METHOD_NOT_ALLOWED: MethodNotAllowed,
        status.HTTP_429_TOO_MANY_REQUESTS: Throttled,
    }
    # Raised for any non-2xx status not present in exception_map.
    default_exception = APIException
    # Content-Type used for file uploads whose type cannot be guessed.
    default_file_content_type = "application/octet-stream"
    # Base headers applied to every request; instance headers merge on top.
    default_headers = {"Content-Type": "application/json"}

    def __init__(
        self,
        base_url: str,
        timeout: int = 80,
        retry_delay: Union[int, float] = 0.1,  # 100 ms
        max_retries: int = 5,
        default_headers: Optional[Dict] = None,
        extended_params: Optional[Dict] = None,
    ):
        """
        :param base_url: root URL all request paths are joined against.
        :param timeout: per-request timeout in seconds.
        :param retry_delay: seconds to sleep between retry attempts.
        :param max_retries: maximum number of attempts for retryable errors.
        :param default_headers: extra headers merged over the class defaults.
        :param extended_params: extra keyword arguments passed through to
            ``requests.Session.request`` on every call.
        :raises TypeError: if ``extended_params`` is not a dict.
        """
        self.base_url = base_url
        self._session = requests.Session()
        self._retry_delay = retry_delay
        self._max_retries = max_retries
        self._extended_params = {} if extended_params is None else extended_params
        if not isinstance(self._extended_params, dict):
            raise TypeError(f"extended_params must be dict, not {type(self._extended_params)}")
        headers = copy(self.default_headers)
        headers.update(default_headers or {})
        self.session.headers.update(headers)
        self._timeout = timeout

    @property
    def session(self) -> requests.Session:
        """The underlying ``requests.Session``."""
        return self._session

    def _clear_data(
        self, data: Optional[Union[Dict, List]] = None
    ) -> Optional[Union[Dict, List]]:
        """Recursively drop ``None`` values from nested containers.

        Dicts keep their non-``None`` entries; lists/tuples/sets/frozensets
        are normalized to lists of their non-``None`` elements. ``None``
        input and scalar values are returned unchanged.

        Fix: the original only matched ``(list, tuple, set)`` at the top
        level even though the nested guard recursed into ``frozenset`` too,
        so a frozenset value crashed with an unbound ``result``.
        """
        if data is None:
            return data
        if isinstance(data, dict):
            result = {}
            for key, value in data.items():
                if value is None:
                    continue
                if isinstance(value, (dict, list, tuple, set, frozenset)):
                    value = self._clear_data(data=value)
                result[key] = value
            return result
        if isinstance(data, (list, tuple, set, frozenset)):
            result = []
            for value in data:
                if value is None:
                    continue
                if isinstance(value, (dict, list, tuple, set, frozenset)):
                    value = self._clear_data(data=value)
                result.append(value)
            return result
        # Scalars (reachable only on direct calls) pass through untouched.
        return data

    def _request(
        self,
        method: str,
        path: str,
        params: Optional[Dict] = None,
        headers: Optional[Dict] = None,
        request_data: Optional[Dict] = None,
        file: Optional[IO] = None,
        **kwargs
    ):
        """Perform one HTTP request and decode the JSON response.

        :param file: open file object to upload as the raw request body;
            mutually exclusive with ``request_data``.
        :raises CrowdinException: if both ``file`` and ``request_data`` are given.
        :raises APIException: (or a subclass from ``exception_map``) for any
            non-2xx response.
        :raises ParsingError: if a 2xx response body is not valid JSON.
        :return: the decoded JSON body, or ``None`` for an empty body.
        """
        if file and request_data:
            raise CrowdinException("API not support multipart data.")

        if file:
            headers = headers or {}
            request_data = file
            # Guess the upload's Content-Type from the filename; fall back
            # to the generic binary type when it cannot be determined.
            file_mime_type = mimetypes.guess_type(file.name)[0]
            headers["Content-Type"] = file_mime_type or self.default_file_content_type
            # Crowdin identifies the upload by this (percent-encoded) name.
            headers["Crowdin-API-FileName"] = quote(os.path.basename(file.name))
        elif request_data is not None:
            request_data = dumps(self._clear_data(request_data))

        # Per-call kwargs take precedence over instance-wide extended params.
        kwargs = {**self._extended_params, **kwargs}
        result = self.session.request(
            method,
            urljoin(self.base_url, path),
            params=loads(dumps(self._clear_data(params or {}))),
            headers=headers,
            data=request_data,
            timeout=self._timeout,
            **kwargs
        )
        status_code = result.status_code
        content = result.content

        # Failure: map the status code to a specific exception type.
        if status_code < 200 or status_code > 299:
            raise self.exception_map.get(status_code, self.default_exception)(
                http_status=status_code, context=content, headers=result.headers, source_headers=headers
            )

        # Success: decode the JSON body (empty body -> None).
        try:
            return loads(content) if content else None
        except json.decoder.JSONDecodeError:
            raise ParsingError(context=content, http_status=status_code, headers=result.headers)

    def request(
        self,
        method,
        path,
        params=None,
        headers=None,
        request_data=None,
        file: Optional[IO] = None,
        **kwargs
    ):
        """Perform a request, retrying retryable API errors.

        Delegates to :meth:`_request`; on an ``APIException`` whose
        ``should_retry`` is true, sleeps ``retry_delay`` seconds and retries
        up to ``max_retries`` total attempts before re-raising.
        """
        num_retries = 0

        while True:
            try:
                return self._request(
                    method=method,
                    path=path,
                    params=params,
                    headers=headers,
                    request_data=request_data,
                    file=file,
                    **kwargs
                )
            except APIException as err:
                num_retries += 1
                if not err.should_retry or num_retries >= self._max_retries:
                    raise err
                # Lazy %-args so the message is only built when emitted.
                logger.info(
                    "Initiating retry %s for request %s %s after sleeping %s seconds.",
                    num_retries,
                    method,
                    path,
                    self._retry_delay,
                )
                time.sleep(self._retry_delay)

    def close(self):
        """Close the underlying HTTP session."""
        self.session.close()

    def __del__(self):
        # Guard: __del__ runs even if __init__ raised before _session was
        # assigned; avoid a spurious AttributeError during interpreter teardown.
        if getattr(self, "_session", None) is not None:
            self.close()