Commit bcb759c

Update the test and use latest crawlee 0.6.12

1 parent: 904f566
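The commit message says the tests now target crawlee 0.6.12. As a purely illustrative sanity check (not part of this commit's diff; the dependency bump itself is in a file not shown below), one could confirm the resolved version in the test environment like this:

# Illustrative check, not part of this commit: confirm the environment
# actually resolved the crawlee version named in the commit message.
from importlib.metadata import version

assert version('crawlee') == '0.6.12', version('crawlee')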

3 files changed: 120 additions, 100 deletions

tests/integration/test_actor_api_helpers.py (2 additions, 2 deletions)
@@ -400,12 +400,12 @@ async def main_server() -> None:
         async with Actor:

             class WebhookHandler(BaseHTTPRequestHandler):
-                def do_GET(self) -> None:  # noqa: N802
+                def do_GET(self) -> None:
                     self.send_response(200)
                     self.end_headers()
                     self.wfile.write(bytes('Hello, world!', encoding='utf-8'))

-                def do_POST(self) -> None:  # noqa: N802
+                def do_POST(self) -> None:
                     nonlocal webhook_body
                     content_length = self.headers.get('content-length')
                     length = int(content_length) if content_length else 0
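The only change in this hunk is dropping the `# noqa: N802` suppressions from `do_GET` and `do_POST`. N802 is the "function name should be lowercase" naming rule, which these methods trip even though `http.server.BaseHTTPRequestHandler` dispatches on those exact names. For context, here is a minimal standalone sketch of the same webhook-handler pattern; the port, the threaded server setup, and the POST response body are illustrative assumptions, not values taken from the test:

# Minimal sketch of the webhook-handler pattern shown in the hunk above.
# Port 8080 and the threaded server setup are illustrative assumptions.
from http.server import BaseHTTPRequestHandler, HTTPServer
from threading import Thread

webhook_body = b''


class WebhookHandler(BaseHTTPRequestHandler):
    def do_GET(self) -> None:  # name is fixed by BaseHTTPRequestHandler, hence the old noqa
        self.send_response(200)
        self.end_headers()
        self.wfile.write(bytes('Hello, world!', encoding='utf-8'))

    def do_POST(self) -> None:
        global webhook_body  # the test uses `nonlocal`; `global` fits this flat sketch
        length = int(self.headers.get('content-length') or 0)
        webhook_body = self.rfile.read(length)
        self.send_response(200)
        self.end_headers()


if __name__ == '__main__':
    server = HTTPServer(('127.0.0.1', 8080), WebhookHandler)
    Thread(target=server.serve_forever, daemon=True).start()
    # ... send requests to http://127.0.0.1:8080/ here, then shut down ...
    server.shutdown()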

tests/integration/test_crawlers_with_storages.py (1 addition, 2 deletions)
@@ -103,8 +103,7 @@ async def default_handler(_: ParselCrawlingContext) -> None:
             raise RuntimeError('Some error')

         await crawler.run(['http://localhost:8080/'])
-        # https://github.com/apify/crawlee-python/issues/1326 , should be max_retries
-        assert failed_counter == max_retries - 1, f'{failed_counter=}'
+        assert failed_counter == max_retries + 1, f'{failed_counter=}'

     actor = await make_actor(label='crawler-max-retries', main_func=main)
     run_result = await run_actor(actor)
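The substantive change: the workaround comment pointing at crawlee-python issue #1326 is removed, and the test now expects the always-failing handler to have run `max_retries + 1` times instead of `max_retries - 1`. A tiny counting sketch of the arithmetic the new assertion implies (one initial attempt plus up to `max_retries` retries); this is only an illustration of the expected count, not crawlee's actual retry logic:

# Counting sketch for the updated assertion, assuming one initial attempt
# plus `max_retries` retries; illustrative only, not crawlee's retry code.
max_retries = 3
failed_counter = 0

for _attempt in range(max_retries + 1):  # initial attempt + max_retries retries
    try:
        raise RuntimeError('Some error')  # the handler always fails
    except RuntimeError:
        failed_counter += 1

assert failed_counter == max_retries + 1, f'{failed_counter=}'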
