Skip to content

Commit 534f276

Browse files
authored
Overwrite default requests user-agent (#7)
* Overwrite default requests user-agent
1 parent e2db1fd commit 534f276

5 files changed

Lines changed: 25 additions & 7 deletions

File tree

scrapingbee/__version__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
__version__ = '1.1.5'

scrapingbee/client.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from requests import request, Response
22

3+
from .default_headers import default_headers
34
from .utils import get_scrapingbee_url, process_headers
45

56

@@ -19,6 +20,9 @@ def request(self, method: str, url: str, params: dict = None, data: dict = None,
1920
if headers:
2021
headers = process_headers(headers)
2122
params['forward_headers'] = True
23+
else:
24+
headers = {}
25+
headers.update(default_headers)
2226

2327
# Add cookies to params
2428
if cookies:

scrapingbee/default_headers.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
from .__version__ import __version__
2+
3+
# Add User-Agent identifying sdk
4+
default_headers = {"User-Agent": f"ScrapingBee-Python/{__version__}"}

setup.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,15 @@
1+
import os
2+
13
from setuptools import setup
24

5+
about = {}
6+
here = os.path.abspath(os.path.dirname(__file__))
7+
with open(os.path.join(here, 'scrapingbee', '__version__.py'), 'r', encoding='utf-8') as f:
8+
exec(f.read(), about)
9+
310
setup(
411
name='scrapingbee',
5-
version='1.1.4',
12+
version=about['__version__'],
613
url='https://github.com/scrapingbee/scrapingbee-python',
714
description='ScrapingBee Python SDK',
815
long_description=open('README.md').read(),

tests/test_client.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
import pytest
44

55
from scrapingbee import ScrapingBeeClient
6+
from scrapingbee.default_headers import default_headers
67

78

89
@pytest.fixture(scope='module')
@@ -20,7 +21,7 @@ def test_get(mock_request, client):
2021
'https://app.scrapingbee.com/api/v1/'
2122
'?api_key=API_KEY&url=https%3A//httpbin.org',
2223
data=None,
23-
headers=None
24+
headers=default_headers
2425
)
2526

2627

@@ -34,7 +35,7 @@ def test_get_with_params(mock_request, client):
3435
'https://app.scrapingbee.com/api/v1/'
3536
'?api_key=API_KEY&url=https%3A//httpbin.org&render_js=True',
3637
data=None,
37-
headers=None,
38+
headers=default_headers,
3839
)
3940

4041

@@ -48,7 +49,8 @@ def test_get_with_headers(mock_request, client):
4849
'https://app.scrapingbee.com/api/v1/'
4950
'?api_key=API_KEY&url=https%3A//httpbin.org&forward_headers=True',
5051
data=None,
51-
headers={'Spb-Content-Type': 'text/html; charset=utf-8'},
52+
headers={'Spb-Content-Type': 'text/html; charset=utf-8',
53+
**default_headers},
5254
)
5355

5456

@@ -65,7 +67,7 @@ def test_get_with_cookies(mock_request, client):
6567
'https://app.scrapingbee.com/api/v1/'
6668
'?api_key=API_KEY&url=https%3A//httpbin.org&cookies=name_1=value_1;name_2=value_2',
6769
data=None,
68-
headers=None,
70+
headers=default_headers,
6971
)
7072

7173

@@ -86,7 +88,7 @@ def test_get_with_extract_rules(mock_request, client):
8688
'extract_rules=%7B%22title%22%3A%20%22h1%22%2C%20%22'
8789
'subtitle%22%3A%20%22%23subtitle%22%7D',
8890
data=None,
89-
headers=None,
91+
headers=default_headers,
9092
)
9193

9294

@@ -99,5 +101,5 @@ def test_post(mock_request, client):
99101
'POST',
100102
'https://app.scrapingbee.com/api/v1/?api_key=API_KEY&url=https%3A//httpbin.org',
101103
data={'KEY_1': 'VALUE_1'},
102-
headers=None
104+
headers=default_headers
103105
)

0 commit comments

Comments (0)