Skip to content

Commit 22ea8fb

Browse files
committed
Support the SHUB_APIURL and SHUB_STORAGE environment variables
1 parent d479ce5 commit 22ea8fb

File tree

5 files changed

+20
-13
lines changed

5 files changed

+20
-13
lines changed

docs/quickstart.rst

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,15 +23,14 @@ it provides better response time and improved bandwidth usage::
2323
Basic usage
2424
-----------
2525

26-
Instantiate a new client with your Scrapinghub API key::
26+
Instantiate a new client with your Scrapy Cloud API key::
2727

2828
>>> from scrapinghub import ScrapinghubClient
2929
>>> apikey = '84c87545607a4bc0****************' # your API key as a string
3030
>>> client = ScrapinghubClient(apikey)
3131

32-
.. note::
33-
Your Scrapinghub API key is available at https://app.scrapinghub.com/account/apikey
34-
after you sign up with the service.
32+
.. note:: Your Scrapy Cloud API key is available at the bottom of
33+
https://app.zyte.com/o/settings after you sign up.
3534

3635
List your deployed projects::
3736

scrapinghub/client/__init__.py

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -27,17 +27,23 @@ def request(self, *args, **kwargs):
2727

2828

2929
class ScrapinghubClient(object):
30-
"""Main class to work with Scrapinghub API.
30+
"""Main class to work with the Scrapy Cloud API.
3131
32-
:param auth: (optional) Scrapinghub APIKEY or other SH auth credentials.
33-
If not provided, it will read, respectively, from
32+
:param auth: (optional) Scrapy Cloud API key or other Scrapy Cloud auth
33+
credentials. If not provided, it will read, respectively, from
3434
``SH_APIKEY`` or ``SHUB_JOBAUTH`` environment variables.
3535
``SHUB_JOBAUTH`` is available by default in *Scrapy Cloud*, but it does
3636
not provide access to all endpoints (e.g. job scheduling); it is allowed
3737
to access job data, collections, and the crawl frontier.
3838
If you need full access to *Scrapy Cloud* features, you'll need to
39-
provide a Scrapinghub APIKEY through this argument or deploying ``SH_APIKEY``.
40-
:param dash_endpoint: (optional) Scrapinghub Dash panel url.
39+
provide a Scrapy Cloud API key through this argument or by deploying
40+
``SH_APIKEY``.
41+
:param dash_endpoint: (optional) Scrapy Cloud API URL.
42+
If not provided, it will be read from the ``SHUB_APIURL`` environment
43+
variable, or fall back to ``"https://app.zyte.com/api/"``.
44+
:param endpoint: (optional) Scrapy Cloud storage API URL.
45+
If not provided, it will be read from the ``SHUB_STORAGE`` environment
46+
variable, or fall back to ``"https://storage.scrapinghub.com/"``.
4147
:param \*\*kwargs: (optional) Additional arguments for
4248
:class:`~scrapinghub.hubstorage.HubstorageClient` constructor.
4349

scrapinghub/hubstorage/client.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22
High level Hubstorage client
33
"""
44
import logging
5+
import os
6+
57
from requests import session, HTTPError, ConnectionError, Timeout
68
from retrying import Retrying
79
from .utils import xauth, urlpathjoin
@@ -78,7 +80,7 @@ def __init__(self, auth=None, endpoint=None, connection_timeout=None,
7880
use_msgpack (bool): Flag to enable/disable msgpack use for serialization
7981
"""
8082
self.auth = xauth(auth)
81-
self.endpoint = endpoint or self.DEFAULT_ENDPOINT
83+
self.endpoint = endpoint or os.getenv("SHUB_STORAGE", self.DEFAULT_ENDPOINT)
8284
self.connection_timeout = connection_timeout or self.DEFAULT_CONNECTION_TIMEOUT_S
8385
self.user_agent = user_agent or self.DEFAULT_USER_AGENT
8486
self.session = self._create_session()

scrapinghub/legacy.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ class Connection(object):
3030
"""Main class to access Scrapinghub API.
3131
"""
3232

33-
DEFAULT_ENDPOINT = 'https://app.scrapinghub.com/api/'
33+
DEFAULT_ENDPOINT = 'https://app.zyte.com/api/'
3434

3535
API_METHODS = {
3636
'addversion': 'scrapyd/addversion',
@@ -66,7 +66,7 @@ def __init__(self, apikey=None, password='', _old_passwd='',
6666
warnings.warn("A lot of endpoints support authentication only via apikey.", stacklevel=2)
6767
self.apikey = apikey
6868
self.password = password or ''
69-
self.url = url or self.DEFAULT_ENDPOINT
69+
self.url = url or os.getenv("SHUB_APIURL", self.DEFAULT_ENDPOINT)
7070
self._session = self._create_session()
7171
self._connection_timeout = connection_timeout
7272

tests/legacy/test_connection.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111

1212

1313
def test_connection_class_attrs():
14-
assert Connection.DEFAULT_ENDPOINT == 'https://app.scrapinghub.com/api/'
14+
assert Connection.DEFAULT_ENDPOINT == 'https://app.zyte.com/api/'
1515
assert isinstance(Connection.API_METHODS, dict)
1616

1717

0 commit comments

Comments
 (0)