Commit 9d4d427

Fix use after cursor closed error
1 parent a6f12ce
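
The error in the title is the classic SQLAlchemy pitfall of handing a live result object out of the connection's with-block: once the block exits, the connection is released and the cursor behind the result is no longer usable. A minimal sketch of the before/after shape of execute_query (an in-memory SQLite engine stands in for Explorer's PostgreSQL database, and the _old/_new names are illustrative, not repository code):

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://")  # stand-in for Explorer's PostgreSQL engine

    def execute_query_old(query):
        # Previous behaviour: the live result escapes the block, but the
        # connection is released as soon as the block exits, so fetching rows
        # later can fail with a "cursor closed" / "result object is closed"
        # error (the exact exception depends on driver and pooling).
        with engine.begin() as conn:
            return conn.execute(query)

    def execute_query_new(query):
        # Fixed behaviour: every row is fetched while the connection is open.
        with engine.begin() as conn:
            return conn.execute(query).fetchall()

    print(execute_query_new(text("select 1")))  # [(1,)]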

2 files changed: 48 additions & 24 deletions

cubedash/index/api.py

Lines changed: 24 additions & 3 deletions
@@ -1,17 +1,18 @@
 from abc import ABC, abstractmethod
 from collections.abc import Generator, Iterable, Sequence
+from contextlib import contextmanager
 from datetime import date, datetime, timedelta
 from typing import Any
 from uuid import UUID
 
 from datacube.index import Index
 from datacube.model import Dataset, MetadataType, Product, Range
 from datacube.model.fields import Field
-from sqlalchemy import Result, Row, Select
+from sqlalchemy import DDL, Result, Row, Select
 from sqlalchemy.sql import ColumnElement
 from sqlalchemy.sql.elements import ClauseElement, Label
 
-from cubedash.summary._schema import PleaseRefresh
+from cubedash.summary._schema import CUBEDASH_SCHEMA, PleaseRefresh
 
 
 class EmptyDbError(Exception):
@@ -30,8 +31,28 @@ def __init__(self, name: str, index: Index) -> None:
 
     # need to add an odc_index accessor
     def execute_query(self, query):
+        """Execute query and return all rows"""
         with self.engine.begin() as conn:
-            return conn.execute(query)
+            return conn.execute(query).fetchall()
+
+    @contextmanager
+    def execute_query_lazy(self, query):
+        """
+        Execute potentially large query, keeping the connection alive
+
+        Example usage::
+
+            with explorer_index.execute_query_lazy(query) as result:
+                for row in result:
+        """
+        with self.engine.connect() as conn:
+            result = conn.execute(query)
+            yield result
+
+    def drop_all(self):
+        """Drop all cubedash-specific tables/schema."""
+        with self.engine.begin() as conn:
+            conn.execute(DDL(f"drop schema if exists {CUBEDASH_SCHEMA} cascade"))
 
     def make_dataset(self, row):
         # pylint: disable=protected-access
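
For the lazy path, a sketch of the contract execute_query_lazy establishes (again with a throwaway SQLite engine; the standalone function below only mirrors the shape of the new method): rows can only be read while the caller's with-block is open, so callers must finish iterating, or copy rows out, before the block exits.

    from contextlib import contextmanager

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://")

    @contextmanager
    def execute_query_lazy(query):
        # The connection is returned to the pool only when the caller's
        # with-block exits, so the result can be streamed row by row.
        with engine.connect() as conn:
            yield conn.execute(query)

    with execute_query_lazy(text("select 1 as n union all select 2")) as result:
        for row in result:  # safe: the connection is still checked out here
            print(row.n)    # 1, then 2

    # Reading from `result` after the with-block has exited would repeat the
    # original bug this commit fixes.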

cubedash/summary/_stores.py

Lines changed: 24 additions & 21 deletions
@@ -27,7 +27,7 @@
 from pygeofilter.parsers.cql2_text import parse as parse_cql2_text
 from shapely.geometry import MultiPolygon
 from shapely.geometry.base import BaseGeometry
-from sqlalchemy import DDL, Row, RowMapping, String, func, select
+from sqlalchemy import Row, RowMapping, String, func, select
 from sqlalchemy.dialects import postgresql as postgres
 from sqlalchemy.dialects.postgresql import TSTZRANGE
 from sqlalchemy.sql import Select
@@ -44,7 +44,7 @@
 from cubedash.index import EmptyDbError, ExplorerIndex
 from cubedash.index.postgis import ExplorerPgisIndex
 from cubedash.index.postgres import ExplorerPgIndex
-from cubedash.summary import RegionInfo, TimePeriodOverview, _extents, _schema
+from cubedash.summary import RegionInfo, TimePeriodOverview, _extents
 from cubedash.summary._extents import ProductArrival, RegionSummary
 from cubedash.summary._schema import PleaseRefresh
 from cubedash.summary._summarise import DEFAULT_TIMEZONE, Summariser
@@ -609,9 +609,7 @@ def drop_all(self) -> None:
         """
         Drop all cubedash-specific tables/schema.
         """
-        self.e_index.execute_query(
-            DDL(f"drop schema if exists {_schema.CUBEDASH_SCHEMA} cascade")
-        )
+        self.e_index.drop_all()
 
     def get(
         self,
@@ -1043,7 +1041,7 @@ def get_count(
             query = self._add_filter_to_query(
                 query, field_exprs, filter_lang, filter_cql
             )
-        result = self.e_index.execute_query(query).fetchall()
+        result = self.e_index.execute_query(query)
 
         if len(result) != 0:
             return result[0][0]
@@ -1123,21 +1121,26 @@ def search_items(
             offset
         )
 
-        for r in self.e_index.execute_query(query):
-            yield DatasetItem(
-                dataset_id=r.id,
-                bbox=_box2d_to_bbox(r.bbox) if r.bbox else None,
-                product_name=r.product_name,
-                geometry=(
-                    _get_shape(r.geometry, self.e_index.get_srid_name(r.geometry.srid))
-                    if r.geometry is not None
-                    else None
-                ),
-                region_code=r.region_code,
-                creation_time=r.creation_time,
-                center_time=r.center_time,
-                odc_dataset=(self.index.datasets.get(r.id) if full_dataset else None),
-            )
+        with self.e_index.execute_query_lazy(query) as result:
+            for r in result:
+                yield DatasetItem(
+                    dataset_id=r.id,
+                    bbox=_box2d_to_bbox(r.bbox) if r.bbox else None,
+                    product_name=r.product_name,
+                    geometry=(
+                        _get_shape(
+                            r.geometry, self.e_index.get_srid_name(r.geometry.srid)
+                        )
+                        if r.geometry is not None
+                        else None
+                    ),
+                    region_code=r.region_code,
+                    creation_time=r.creation_time,
+                    center_time=r.center_time,
+                    odc_dataset=(
+                        self.index.datasets.get(r.id) if full_dataset else None
+                    ),
+                )
 
     def search_collections(
         self,
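
One design point about the search_items change, shown with a hypothetical stand-in (fake_lazy_query and the simplified search_items below are not repository code): because the rows are yielded from inside the execute_query_lazy block, the connection is held only while the caller is actually iterating, and closing the generator early unwinds the with-block and releases it.

    from contextlib import contextmanager

    @contextmanager
    def fake_lazy_query():
        print("connection checked out")
        try:
            yield iter(range(3))
        finally:
            print("connection released")

    def search_items():
        # Mirrors the new shape: rows are yielded from inside the context
        # manager, so the "connection" stays open across the yields.
        with fake_lazy_query() as result:
            for r in result:
                yield r

    items = search_items()
    print(next(items))  # prints "connection checked out", then 0
    items.close()       # GeneratorExit unwinds the with-block: "connection released"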
