Skip to content

Commit 0ca7123

Browse files
pre-commit-ci[bot] authored and omad committed
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 385fae6 commit 0ca7123

15 files changed

+151
-152
lines changed

cubedash/_model.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -211,7 +211,7 @@ def get_products() -> List[ProductWithSummary]:
211211
]
212212
if products and not STORE.list_complete_products():
213213
raise RuntimeError(
214-
"No products are summarised. " "Run `cubedash-gen --all` to generate some."
214+
"No products are summarised. Run `cubedash-gen --all` to generate some."
215215
)
216216

217217
return products

cubedash/_monitoring.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,8 @@ def time_end(response: flask.Response):
2828
render_time = time.time() - flask.g.start_render
2929
response.headers.add_header(
3030
"Server-Timing",
31-
f"app;dur={render_time*1000},"
32-
f'odcquery;dur={flask.g.datacube_query_time*1000};desc="ODC query time",'
31+
f"app;dur={render_time * 1000},"
32+
f'odcquery;dur={flask.g.datacube_query_time * 1000};desc="ODC query time",'
3333
f"odcquerycount_{flask.g.datacube_query_count};"
3434
f'desc="{flask.g.datacube_query_count} ODC queries"',
3535
)
@@ -79,7 +79,7 @@ def decorator(*args, **kwargs):
7979
duration_secs = time.time() - start_time
8080
print(
8181
f"== Index Call == {style(function.__name__, bold=True)}: "
82-
f"{duration_secs*1000}",
82+
f"{duration_secs * 1000}",
8383
file=sys.stderr,
8484
flush=True,
8585
)

cubedash/_stac.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1273,7 +1273,7 @@ def collection_month(collection: str, year: int, month: int):
12731273
date = datetime(year, month, 1).date()
12741274
c = Catalog(
12751275
f"{collection}-{year}-{month}",
1276-
description=f'{collection} for {date.strftime("%B %Y")}',
1276+
description=f"{collection} for {date.strftime('%B %Y')}",
12771277
)
12781278

12791279
c.links.extend(

cubedash/summary/_stores.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1718,9 +1718,9 @@ def _get_shape(geometry: WKBElement, crs) -> Optional[Geometry]:
17181718

17191719
if not shape.is_valid:
17201720
newshape = shape.buffer(0)
1721-
assert math.isclose(
1722-
shape.area, newshape.area, abs_tol=0.0001
1723-
), f"{shape.area} != {newshape.area}"
1721+
assert math.isclose(shape.area, newshape.area, abs_tol=0.0001), (
1722+
f"{shape.area} != {newshape.area}"
1723+
)
17241724
shape = newshape
17251725
return shape
17261726

cubedash/warmup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -219,7 +219,7 @@ def _format_time(t: float):
219219
if t > 1:
220220
return f"{t:.1f}s"
221221
else:
222-
return f"{int(t*1000)}ms"
222+
return f"{int(t * 1000)}ms"
223223

224224

225225
if __name__ == "__main__":

integration_tests/asserts.py

Lines changed: 29 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -36,9 +36,9 @@ def assert_shapes_mostly_equal(
3636
# __tracebackhide__ = operator.methodcaller("errisinstance", AssertionError)
3737

3838
# Check area first, as it's a nicer error message when they're wildly different.
39-
assert shape1.area == pytest.approx(
40-
shape2.area, abs=threshold
41-
), "Shapes have different areas"
39+
assert shape1.area == pytest.approx(shape2.area, abs=threshold), (
40+
"Shapes have different areas"
41+
)
4242

4343
s1 = shape1.simplify(tolerance=threshold)
4444
s2 = shape2.simplify(tolerance=threshold)
@@ -135,17 +135,17 @@ def check_dataset_count(html, count: int):
135135
__tracebackhide__ = True
136136
actual = html.find(".dataset-count", first=True).text
137137
expected = f"{count:,d}"
138-
assert (
139-
f"{expected} dataset" in actual
140-
), f"Incorrect dataset count: found {actual} instead of {expected}"
138+
assert f"{expected} dataset" in actual, (
139+
f"Incorrect dataset count: found {actual} instead of {expected}"
140+
)
141141

142142

143143
def check_datesets_page_datestring(html, datestring: str):
144144
__tracebackhide__ = True
145145
actual = html.find(".overview-day-link", first=True).text
146-
assert (
147-
datestring == actual
148-
), f"Incorrect datestring: found {actual} instead of {datestring}"
146+
assert datestring == actual, (
147+
f"Incorrect datestring: found {actual} instead of {datestring}"
148+
)
149149

150150

151151
def expect_values(
@@ -168,22 +168,22 @@ def expect_values(
168168
assert s.dataset_count == dataset_count, "wrong dataset count"
169169
assert s.footprint_count == footprint_count, "wrong footprint count"
170170
if s.footprint_count is not None and s.footprint_count > 0:
171-
assert (
172-
s.footprint_geometry is not None
173-
), "No footprint, despite footprint count"
171+
assert s.footprint_geometry is not None, (
172+
"No footprint, despite footprint count"
173+
)
174174
assert s.footprint_geometry.area > 0, "Empty footprint"
175175

176176
assert s.time_range == time_range, "wrong dataset time range"
177-
assert s.newest_dataset_creation_time == default_utc(
178-
newest_creation_time
179-
), "wrong newest dataset creation"
177+
assert s.newest_dataset_creation_time == default_utc(newest_creation_time), (
178+
"wrong newest dataset creation"
179+
)
180180
assert s.timeline_period == timeline_period, (
181-
f"Should be a {timeline_period}, " f"not {s.timeline_period} timeline"
181+
f"Should be a {timeline_period}, not {s.timeline_period} timeline"
182182
)
183183

184-
assert (
185-
s.summary_gen_time is not None
186-
), "Missing summary_gen_time (there's a default)"
184+
assert s.summary_gen_time is not None, (
185+
"Missing summary_gen_time (there's a default)"
186+
)
187187

188188
assert s.crses == crses, "Wrong dataset CRSes"
189189

@@ -202,16 +202,16 @@ def expect_values(
202202
f"Expected entry with {timeline_count} records."
203203
)
204204
else:
205-
assert (
206-
len(s.timeline_dataset_counts) == timeline_count
207-
), "wrong timeline entry count"
208-
209-
assert (
210-
sum(s.region_dataset_counts.values()) == s.dataset_count
211-
), "region dataset count doesn't match total dataset count"
212-
assert (
213-
sum(s.timeline_dataset_counts.values()) == s.dataset_count
214-
), "timeline count doesn't match dataset count"
205+
assert len(s.timeline_dataset_counts) == timeline_count, (
206+
"wrong timeline entry count"
207+
)
208+
209+
assert sum(s.region_dataset_counts.values()) == s.dataset_count, (
210+
"region dataset count doesn't match total dataset count"
211+
)
212+
assert sum(s.timeline_dataset_counts.values()) == s.dataset_count, (
213+
"timeline count doesn't match dataset count"
214+
)
215215
was_timeline_error = False
216216

217217
if region_dataset_counts is not None:

integration_tests/conftest.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -59,9 +59,9 @@ def _run_cli(cli_method, opts, catch_exceptions=False, expect_success=True):
5959
opts += ("--env", env_name)
6060
result = runner.invoke(cli_method, opts, catch_exceptions=catch_exceptions)
6161
if expect_success:
62-
assert (
63-
0 == result.exit_code
64-
), f"Error for {opts}. Out:\n{indent(result.output, ' ' * 4)}"
62+
assert 0 == result.exit_code, (
63+
f"Error for {opts}. Out:\n{indent(result.output, ' ' * 4)}"
64+
)
6565
return result
6666

6767
return _run_cli

integration_tests/test_center_datetime_logic.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -43,21 +43,21 @@ def test_datestring_on_dataset_page(client: FlaskClient):
4343
def test_datestring_on_datasets_search_page(client: FlaskClient):
4444
html = get_html(client, "/products/rainfall_chirps_daily/datasets")
4545

46-
assert (
47-
"Time UTC: 2019-05-15 00:00:00"
48-
in [
49-
a.find("td", first=True).attrs["title"] for a in html.find(".search-result")
50-
]
51-
), "datestring does not match expected center_time recorded in dataset_spatial table"
46+
assert "Time UTC: 2019-05-15 00:00:00" in [
47+
a.find("td", first=True).attrs["title"] for a in html.find(".search-result")
48+
], (
49+
"datestring does not match expected center_time recorded in dataset_spatial table"
50+
)
5251

5352

5453
def test_datestring_on_regions_page(client: FlaskClient):
5554
html = get_html(client, "/product/rainfall_chirps_daily/regions/x210y106")
5655

57-
assert (
58-
"2019-05-15 00:00:00"
59-
in [a.find("td", first=True).text.strip() for a in html.find(".search-result")]
60-
), "datestring does not match expected center_time recorded in dataset_spatial table"
56+
assert "2019-05-15 00:00:00" in [
57+
a.find("td", first=True).text.strip() for a in html.find(".search-result")
58+
], (
59+
"datestring does not match expected center_time recorded in dataset_spatial table"
60+
)
6161

6262

6363
def test_summary_center_datetime(client: FlaskClient):

integration_tests/test_eo3_support.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -125,9 +125,9 @@ def test_eo3_extents(eo3_index: Index):
125125
),
126126
}
127127
assert footprint.is_valid, "Created footprint is not a valid geometry"
128-
assert (
129-
dataset_extent_row["footprint"].srid == 32650
130-
), "Expected epsg:32650 within the footprint geometry"
128+
assert dataset_extent_row["footprint"].srid == 32650, (
129+
"Expected epsg:32650 within the footprint geometry"
130+
)
131131

132132
assert dataset_extent_row["region_code"] == "113081"
133133
assert dataset_extent_row["size_bytes"] is None
@@ -210,9 +210,9 @@ def test_undo_eo3_doc_compatibility(eo3_index: Index):
210210
with TEST_EO3_DATASET_ARD.open("r") as f:
211211
raw_doc = YAML(typ="safe", pure=True).load(f)
212212

213-
assert (
214-
indexed_doc == raw_doc
215-
), "Document does not match original after undoing compatibility fields."
213+
assert indexed_doc == raw_doc, (
214+
"Document does not match original after undoing compatibility fields."
215+
)
216216

217217

218218
def test_undo_eo3_compatibility_del_handling():

integration_tests/test_page_loads.py

Lines changed: 27 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -154,9 +154,9 @@ def test_all_products_are_shown(client: FlaskClient):
154154
a.text.strip() for a in html.find(".product-selection-header .option-menu-link")
155155
)
156156
indexed_product_names = sorted(p.name for p in _model.STORE.all_products())
157-
assert (
158-
found_product_names == indexed_product_names
159-
), "Product shown in menu don't match the indexed products"
157+
assert found_product_names == indexed_product_names, (
158+
"Product shown in menu don't match the indexed products"
159+
)
160160

161161

162162
def test_get_overview_product_links(client: FlaskClient):
@@ -407,36 +407,36 @@ def test_api_returns_high_tide_comp_datasets(client: FlaskClient):
407407
These are slightly fun to handle as they are a small number with a huge time range.
408408
"""
409409
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p")
410-
assert (
411-
len(geojson["features"]) == 306
412-
), "Not all high tide datasets returned as geojson"
410+
assert len(geojson["features"]) == 306, (
411+
"Not all high tide datasets returned as geojson"
412+
)
413413

414414
# Search and time summary is only based on center time.
415415
# These searches are within the dataset time range, but not the center_time.
416416
# Dataset range: '2000-01-01T00:00:00' to '2016-10-31T00:00:00'
417417
# year
418418
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p/2008")
419-
assert (
420-
len(geojson["features"]) == 306
421-
), "Expected high tide datasets within whole dataset range"
419+
assert len(geojson["features"]) == 306, (
420+
"Expected high tide datasets within whole dataset range"
421+
)
422422
# month
423423
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p/2008/6")
424-
assert (
425-
len(geojson["features"]) == 306
426-
), "Expected high tide datasets within whole dataset range"
424+
assert len(geojson["features"]) == 306, (
425+
"Expected high tide datasets within whole dataset range"
426+
)
427427
# day
428428
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p/2008/6/1")
429-
assert (
430-
len(geojson["features"]) == 306
431-
), "Expected high tide datasets within whole dataset range"
429+
assert len(geojson["features"]) == 306, (
430+
"Expected high tide datasets within whole dataset range"
431+
)
432432

433433
# Out of the test dataset time range. No results.
434434

435435
# Completely outside of range
436436
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p/2018")
437-
assert (
438-
len(geojson["features"]) == 0
439-
), "Expected no high tide datasets in in this year"
437+
assert len(geojson["features"]) == 0, (
438+
"Expected no high tide datasets in in this year"
439+
)
440440
# One day before/after (is time zone handling correct?)
441441
geojson = get_geojson(client, "/api/datasets/high_tide_comp_20p/2008/6/2")
442442
assert len(geojson["features"]) == 0, "Expected no result one-day-after center time"
@@ -469,9 +469,9 @@ def test_api_returns_high_tide_comp_regions(client: FlaskClient):
469469
"""
470470

471471
rv: Response = client.get("/api/regions/high_tide_comp_20p")
472-
assert (
473-
rv.status_code == 404
474-
), "High tide comp does not support regions: it should return not-exist code."
472+
assert rv.status_code == 404, (
473+
"High tide comp does not support regions: it should return not-exist code."
474+
)
475475

476476

477477
def test_api_returns_scene_regions(client: FlaskClient):
@@ -908,9 +908,9 @@ def test_with_timings(client: FlaskClient):
908908
for f in rv.headers["Server-Timing"].split(",")
909909
if f.startswith("odcquerycount_")
910910
]
911-
assert (
912-
count_header
913-
), f"No query count server timing header found in {rv.headers['Server-Timing']}"
911+
assert count_header, (
912+
f"No query count server timing header found in {rv.headers['Server-Timing']}"
913+
)
914914

915915
# Example header:
916916
# app;dur=1034.12,odcquery;dur=103.03;desc="ODC query time",odcquerycount_6;desc="6 ODC queries"
@@ -969,9 +969,9 @@ def test_get_robots(client: FlaskClient):
969969
num_lines = len(text.split("\n"))
970970
assert num_lines > 1, "robots.txt should have multiple lines"
971971

972-
assert (
973-
rv.headers["Content-Type"] == "text/plain"
974-
), "robots.txt content-type should be text/plain"
972+
assert rv.headers["Content-Type"] == "text/plain", (
973+
"robots.txt content-type should be text/plain"
974+
)
975975

976976

977977
def test_all_give_404s(client: FlaskClient):

0 commit comments

Comments (0)