diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 839d974d7..542135465 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -9,6 +9,7 @@ on:
     # Everything should be merged through PRs anyway.
     branches:
       - develop
+      - develop-1.9
       - test-ci-*
       - pypi/publish
     paths:
@@ -23,14 +24,14 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       - name: Setup Python
-        uses: actions/setup-python@v1
+        uses: actions/setup-python@v5
         with:
-          python-version: 3.9
+          python-version: '3.10'

-      - uses: actions/cache@v3
+      - uses: actions/cache@v4
         id: wheels_cache
         with:
           path: ./wheels
@@ -68,16 +69,16 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

-      - uses: actions/cache@v3
+      - uses: actions/cache@v4
         id: conda_cache
         with:
           path: |
             tests/env
           key: ${{ runner.os }}-test-env-${{ hashFiles('tests/test-env.yml') }}

-      - uses: conda-incubator/setup-miniconda@v2
+      - uses: conda-incubator/setup-miniconda@v3
         if: steps.conda_cache.outputs.cache-hit != 'true'
         with:
           channels: conda-forge,defaults
@@ -85,7 +86,7 @@ jobs:
           activate-environment: ""
           # mamba-version: "*"
           use-mamba: true
-          miniforge-variant: Mambaforge
+          miniforge-variant: Miniforge3

       - name: Dump Conda Environment Info
         shell: bash -l {0}
@@ -118,10 +119,10 @@ jobs:
       - build-test-env-base

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       - name: Get Conda Environment from Cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: conda_cache
         with:
           path: |
@@ -148,15 +149,20 @@ jobs:
         run: |
           echo "Launching test db"
           pgdata=$(pwd)/.dbdata
-          initdb -D ${pgdata} --auth-host=md5 --encoding=UTF8
-          pg_ctl -D ${pgdata} -l "${pgdata}/pg.log" start
-          createdb datacube
+          sudo chmod 777 /var/run/postgresql
+          /usr/lib/postgresql/16/bin/initdb -D ${pgdata} --auth-host=md5 --encoding=UTF8
+          /usr/lib/postgresql/16/bin/pg_ctl -D ${pgdata} -l "${pgdata}/pg.log" start
+          /usr/lib/postgresql/16/bin/createdb datacube
           datacube system init
           pip list --format=freeze
         env:
           ODC_DATACUBE_DB_URL: postgresql:///datacube

+      # - name: Setup tmate session
+      #   if: ${{ failure() }}
+      #   uses: mxschmitt/action-tmate@v3
+
       - name: Run Tests
         shell: bash
         run: |
@@ -179,7 +185,7 @@ jobs:
         if: |
           github.repository == 'opendatacube/odc-tools'

-        uses: codecov/codecov-action@v1
+        uses: codecov/codecov-action@v5
         with:
           fail_ci_if_error: false
           verbose: false
@@ -193,17 +199,17 @@ jobs:
       - build-wheels

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       - name: Get Wheels from Cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: wheels_cache
         with:
           path: ./wheels
           key: wheels-${{ github.sha }}

       - name: Get Conda Environment from Cache
-        uses: actions/cache@v3
+        uses: actions/cache@v4
         id: conda_cache
         with:
           path: |
@@ -231,9 +237,10 @@ jobs:
         run: |
           echo "Launching test db"
           pgdata=$(pwd)/.dbdata
-          initdb -D ${pgdata} --auth-host=md5 --encoding=UTF8
-          pg_ctl -D ${pgdata} -l "${pgdata}/pg.log" start
-          createdb datacube
+          sudo chmod 777 /var/run/postgresql
+          /usr/lib/postgresql/16/bin/initdb -D ${pgdata} --auth-host=md5 --encoding=UTF8
+          /usr/lib/postgresql/16/bin/pg_ctl -D ${pgdata} -l "${pgdata}/pg.log" start
+          /usr/lib/postgresql/16/bin/createdb datacube
           datacube system init

         env:
@@ -275,7 +282,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4

       - name: Config
         if: |
@@ -292,9 +299,9 @@ jobs:

       - name: Setup Python
         if: steps.cfg.outputs.publish == 'yes'
-        uses: actions/setup-python@v1
+        uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: '3.10'

       - name: Install Twine
         if: steps.cfg.outputs.publish == 'yes'
diff --git a/apps/dc_tools/odc/apps/dc_tools/add_update_products.py b/apps/dc_tools/odc/apps/dc_tools/add_update_products.py
index 56287e5d9..9b3cf92a0 100644
--- a/apps/dc_tools/odc/apps/dc_tools/add_update_products.py
+++ b/apps/dc_tools/odc/apps/dc_tools/add_update_products.py
@@ -14,6 +14,8 @@
 import datacube

 from datacube import Datacube
+from datacube.cfg import ODCEnvironment
+from datacube.ui.click import environment_option, pass_config
 from odc.apps.dc_tools.utils import (
     update_if_exists_flag,
     statsd_gauge_reporting,
@@ -123,12 +125,16 @@ def add_update_products(


 @click.command("dc-sync-products")
+@environment_option
+@pass_config
 @click.argument("csv-path", nargs=1)
 @update_if_exists_flag
 @statsd_setting
-def cli(csv_path: str, update_if_exists: bool, statsd_setting: str):
+def cli(
+    cfg_env: ODCEnvironment, csv_path: str, update_if_exists: bool, statsd_setting: str
+):
     # Check we can connect to the Datacube
-    dc = datacube.Datacube(app="add_update_products")
+    dc = datacube.Datacube(app="add_update_products", env=cfg_env)
     logging.info(
         "Starting up: connected to Datacube, and update-if-exists is: %s",
         update_if_exists,
diff --git a/apps/dc_tools/odc/apps/dc_tools/azure_to_dc.py b/apps/dc_tools/odc/apps/dc_tools/azure_to_dc.py
index 99ce6e058..f1a18e804 100644
--- a/apps/dc_tools/odc/apps/dc_tools/azure_to_dc.py
+++ b/apps/dc_tools/odc/apps/dc_tools/azure_to_dc.py
@@ -10,7 +10,9 @@
 import click

 from datacube import Datacube
+from datacube.cfg import ODCEnvironment
 from datacube.index.hl import Doc2Dataset
+from datacube.ui.click import environment_option, pass_config
 from odc.apps.dc_tools._stac import stac_transform
 from odc.apps.dc_tools.utils import (
     SkippedException,
@@ -128,6 +130,8 @@ def dump_list_to_odc(


 @click.command("azure-to-dc")
+@environment_option
+@pass_config
 @update_flag
 @update_if_exists_flag
 @allow_unsafe
@@ -151,6 +155,7 @@ def dump_list_to_odc(
 @click.argument("suffix", type=str, nargs=1)
 @rename_product
 def cli(
+    cfg_env: ODCEnvironment,
     update: bool,
     update_if_exists: bool,
     allow_unsafe: bool,
@@ -166,7 +171,7 @@ def cli(
     rename_product: str,
 ):
     # Set up the datacube first, to ensure we have a connection
-    dc = Datacube()
+    dc = Datacube(env=cfg_env)
     print(f"Opening AZ Container {container_name} on {account_url}")
     print(f"Searching on prefix '{prefix}' for files matching suffix '{suffix}'")
     yaml_urls = find_blobs(
diff --git a/apps/dc_tools/odc/apps/dc_tools/cop_dem_to_dc.py b/apps/dc_tools/odc/apps/dc_tools/cop_dem_to_dc.py
index 0acd4b788..7123730a7 100644
--- a/apps/dc_tools/odc/apps/dc_tools/cop_dem_to_dc.py
+++ b/apps/dc_tools/odc/apps/dc_tools/cop_dem_to_dc.py
@@ -14,6 +14,7 @@

 from datacube import Datacube
 from datacube.index.hl import Doc2Dataset
+from datacube.ui.click import environment_option, pass_config
 from datacube.utils import read_documents
 from odc.apps.dc_tools.utils import (
     SkippedException,
@@ -202,6 +203,8 @@ def cop_dem_to_dc(


 @click.command("cop-dem-to-dc")
+@environment_option
+@pass_config
 @limit
 @update_if_exists_flag
 @bbox
@@ -226,6 +229,7 @@
     help="Number of threads to use to process, default 20",
 )
 def cli(
+    cfg_env,
     limit,
     update_if_exists,
     bbox,
@@ -244,7 +248,7 @@ def cli(
             f"Unknown product {product}, must be one of {' '.join(PRODUCTS)}"
         )

-    dc = Datacube()
+    dc = Datacube(env=cfg_env)

     if add_product:
         add_cop_dem_product(dc, product)
diff --git a/apps/dc_tools/odc/apps/dc_tools/esa_worldcover_to_dc.py b/apps/dc_tools/odc/apps/dc_tools/esa_worldcover_to_dc.py
index 733515e81..e60770533 100644
--- a/apps/dc_tools/odc/apps/dc_tools/esa_worldcover_to_dc.py
+++ b/apps/dc_tools/odc/apps/dc_tools/esa_worldcover_to_dc.py
@@ -14,6 +14,7 @@

 from datacube import Datacube
 from datacube.index.hl import Doc2Dataset
+from datacube.ui.click import environment_option, pass_config
 from datacube.utils import read_documents
 from odc.apps.dc_tools.utils import (
     bbox,
@@ -213,6 +214,8 @@ def esa_wc_to_dc(


 @click.command("esa-wc-to-dc")
+@environment_option
+@pass_config
 @limit
 @update_if_exists_flag
 @bbox
@@ -238,6 +241,7 @@
     help="Select version of world cover map, default 2020",
 )
 def cli(
+    cfg_env,
     limit,
     update_if_exists,
     bbox,
@@ -255,7 +259,7 @@ def cli(
     # Select map version
     select_map_version(version)

-    dc = Datacube()
+    dc = Datacube(env=cfg_env)

     if add_product:
         add_odc_product(dc)
diff --git a/apps/dc_tools/odc/apps/dc_tools/fs_to_dc.py b/apps/dc_tools/odc/apps/dc_tools/fs_to_dc.py
index f72e77e2a..18e8ede62 100755
--- a/apps/dc_tools/odc/apps/dc_tools/fs_to_dc.py
+++ b/apps/dc_tools/odc/apps/dc_tools/fs_to_dc.py
@@ -6,6 +6,7 @@

 import datacube
 from datacube.index.hl import Doc2Dataset
+from datacube.ui.click import environment_option, pass_config
 from odc.apps.dc_tools._stac import stac_transform
 from odc.apps.dc_tools.utils import (
     allow_unsafe,
@@ -26,6 +27,8 @@


 @click.command("fs-to-dc")
+@environment_option
+@pass_config
 @click.argument("input_directory", type=str, nargs=1)
 @update_if_exists_flag
 @allow_unsafe
@@ -39,6 +42,7 @@
     help="File system glob to use, defaults to **/*.yaml or **/*.json for STAC.",
 )
 def cli(
+    cfg_env,
     input_directory,
     update_if_exists,
     allow_unsafe,
@@ -48,7 +52,7 @@ def cli(
     archive_less_mature,
     publish_action,
 ):
-    dc = datacube.Datacube()
+    dc = datacube.Datacube(env=cfg_env)
     doc2ds = Doc2Dataset(dc.index)

     if glob is None:
diff --git a/apps/dc_tools/odc/apps/dc_tools/s3_to_dc.py b/apps/dc_tools/odc/apps/dc_tools/s3_to_dc.py
index cf3dc0fab..7bb8bc05e 100755
--- a/apps/dc_tools/odc/apps/dc_tools/s3_to_dc.py
+++ b/apps/dc_tools/odc/apps/dc_tools/s3_to_dc.py
@@ -10,6 +10,7 @@

 from datacube import Datacube
 from datacube.index.hl import Doc2Dataset
+from datacube.ui.click import environment_option, pass_config
 from odc.apps.dc_tools._docs import parse_doc_stream
 from odc.apps.dc_tools._stac import stac_transform
 from odc.apps.dc_tools.utils import (
@@ -93,6 +94,8 @@ def dump_to_odc(


 @click.command("s3-to-dc")
+@environment_option
+@pass_config
 @click.option(
     "--log",
     type=click.Choice(
@@ -118,6 +121,7 @@ def dump_to_odc(
 @click.argument("uris", nargs=-1)
 @click.argument("product", type=str, nargs=1, required=False)
 def cli(
+    cfg_env,
     log,
     skip_lineage,
     fail_on_missing_lineage,
@@ -156,7 +160,7 @@ def cli(
     if request_payer:
         opts["RequestPayer"] = "requester"

-    dc = Datacube()
+    dc = Datacube(env=cfg_env)

     # if it's a uri, a product wasn't provided, and 'product' is actually another uri
     if product.startswith("s3://"):
diff --git a/apps/dc_tools/odc/apps/dc_tools/sqs_to_dc.py b/apps/dc_tools/odc/apps/dc_tools/sqs_to_dc.py
index a6c945eec..0c1dd8d39 100644
--- a/apps/dc_tools/odc/apps/dc_tools/sqs_to_dc.py
+++ b/apps/dc_tools/odc/apps/dc_tools/sqs_to_dc.py
@@ -19,6 +19,7 @@

 from datacube import Datacube
 from datacube.index.hl import Doc2Dataset
+from datacube.ui.click import environment_option, pass_config
 from datacube.utils import documents
 from odc.apps.dc_tools.utils import (
     IndexingException,
@@ -306,6 +307,8 @@ def queue_to_odc(


 @click.command("sqs-to-dc")
+@environment_option
+@pass_config
 @skip_lineage
 @fail_on_missing_lineage
 @verify_lineage
@@ -342,6 +345,7 @@ def queue_to_odc(
 @click.argument("queue_name", type=str, nargs=1)
 @click.argument("product", type=str, nargs=1)
 def cli(
+    cfg_env,
     skip_lineage,
     fail_on_missing_lineage,
     verify_lineage,
@@ -369,7 +373,7 @@ def cli(
     queue = sqs.get_queue_by_name(QueueName=queue_name)

     # Do the thing
-    dc = Datacube()
+    dc = Datacube(env=cfg_env)
     success, failed, skipped = queue_to_odc(
         queue,
         dc,
diff --git a/apps/dc_tools/odc/apps/dc_tools/stac_api_to_dc.py b/apps/dc_tools/odc/apps/dc_tools/stac_api_to_dc.py
index 99e7378dd..6d2bb4ded 100644
--- a/apps/dc_tools/odc/apps/dc_tools/stac_api_to_dc.py
+++ b/apps/dc_tools/odc/apps/dc_tools/stac_api_to_dc.py
@@ -10,6 +10,7 @@
 import click
 from datacube import Datacube
 from datacube.index.hl import Doc2Dataset
+from datacube.ui.click import environment_option, pass_config
 from odc.apps.dc_tools._stac import stac_transform
 from odc.apps.dc_tools.utils import (
     SkippedException,
@@ -169,6 +170,8 @@ def stac_api_to_odc(


 @click.command("stac-to-dc")
+@environment_option
+@pass_config
 @limit
 @update_if_exists_flag
 @allow_unsafe
@@ -202,6 +205,7 @@
 @publish_action
 @statsd_setting
 def cli(
+    cfg_env,
     limit,
     update_if_exists,
     allow_unsafe,
@@ -235,7 +239,7 @@
         config["max_items"] = limit

     # Do the thing
-    dc = Datacube()
+    dc = Datacube(env=cfg_env)
     added, failed, skipped = stac_api_to_odc(
         dc,
         update_if_exists,
diff --git a/apps/dc_tools/odc/apps/dc_tools/thredds_to_dc.py b/apps/dc_tools/odc/apps/dc_tools/thredds_to_dc.py
index 44a2c15f3..6c4fc4a9c 100644
--- a/apps/dc_tools/odc/apps/dc_tools/thredds_to_dc.py
+++ b/apps/dc_tools/odc/apps/dc_tools/thredds_to_dc.py
@@ -8,6 +8,8 @@
 from typing import List, Tuple

 from datacube import Datacube
+from datacube.cfg import ODCEnvironment
+from datacube.ui.click import environment_option, pass_config
 from odc.apps.dc_tools.utils import statsd_gauge_reporting, statsd_setting

 from ._docs import from_yaml_doc_stream
@@ -48,6 +50,8 @@ def dump_list_to_odc(


 @click.command("thredds-to-dc")
+@environment_option
+@pass_config
 @click.option(
     "--skip-lineage",
     is_flag=True,
@@ -73,6 +77,7 @@ def dump_list_to_odc(
 @click.argument("uri", type=str, nargs=1)
 @click.argument("product", type=str, nargs=1)
 def cli(
+    cfg_env: ODCEnvironment,
     skip_lineage: bool,
     fail_on_missing_lineage: bool,
     verify_lineage: bool,
@@ -91,7 +96,7 @@ def cli(
     yaml_contents = download_yamls(yaml_urls)

     # Consume generator and fetch YAML's
-    dc = Datacube()
+    dc = Datacube(env=cfg_env)
     added, failed = dump_list_to_odc(
         yaml_contents,
         dc,
diff --git a/apps/dc_tools/odc/apps/dc_tools/utils.py b/apps/dc_tools/odc/apps/dc_tools/utils.py
index c528c2412..9d1f7802a 100644
--- a/apps/dc_tools/odc/apps/dc_tools/utils.py
+++ b/apps/dc_tools/odc/apps/dc_tools/utils.py
@@ -40,12 +40,11 @@ class SkippedException(Exception):
 )

 fail_on_missing_lineage = click.option(
-    "--fail-on-missing-lineage/--auto-add-lineage",
+    "--fail-on-missing-lineage",
     is_flag=True,
-    default=True,
     help=(
-        "Default is to fail if lineage documents not present in the database. "
-        "Set auto add to try to index lineage documents."
+        "Default is to permit unindexed/external lineage documents. "
+        "Set flag to fail if lineage documents are not present in the database."
     ),
 )
diff --git a/apps/dc_tools/setup.cfg b/apps/dc_tools/setup.cfg
index db26544b9..29d4b29c1 100644
--- a/apps/dc_tools/setup.cfg
+++ b/apps/dc_tools/setup.cfg
@@ -19,7 +19,10 @@ packages = find_namespace:
 python_requires = >= 3.10
 tests_require =
     pytest
+    pytest_httpserver
     deepdiff
+    psycopg2
+    docker

 install_requires =
     click
@@ -27,7 +30,7 @@ install_requires =
     pystac-client>=0.2.0
     toolz
     pyyaml
-    datacube>=1.9.0-rc9
+    datacube>=1.9.0
     odc_io
     odc-cloud[ASYNC]>=0.2.3
     odc-geo
@@ -35,8 +38,7 @@ install_requires =
     rio-stac
     urlpath
     datadog
-    eodatasets3 @ git+https://github.com/opendatacube/eo-datasets@integrate-1.9
-    importlib_resources>=6.0
+    eodatasets3>=1.9

 [options.extras_require]
 tests =
diff --git a/apps/dc_tools/tests/conftest.py b/apps/dc_tools/tests/conftest.py
index bbdb58d7e..2a91d56a7 100644
--- a/apps/dc_tools/tests/conftest.py
+++ b/apps/dc_tools/tests/conftest.py
@@ -223,32 +223,37 @@ def postgresql_server():
             "db_port": host_port,
             "db_database": "odc_tools_test",
             "db_password": "badpassword",
-            "index_driver": "default",
         }
         # 'f"postgresql://odc_tools_test:badpassword@localhost:{host_port}/odc_tools_test",
     finally:
         container.remove(v=True, force=True)


-@pytest.fixture
+@pytest.fixture(scope="module")
 def odc_test_db(
-    postgresql_server, tmp_path, monkeypatch
+    postgresql_server, tmp_path_factory, request
 ):  # pytest: disable=inconsistent-return-statements
     if postgresql_server == GET_DB_FROM_ENV:
-        return os.environ["ODC_DATACUBE_DB_URL"]
+        yield None  # return os.environ["ODC_DATACUBE_DB_URL"]
     else:
-        temp_datacube_config_file = tmp_path / "test_datacube.conf"
+        temp_datacube_config_file = (
+            tmp_path_factory.mktemp("odc") / "test_datacube.conf"
+        )

         config = configparser.ConfigParser()
-        config["datacube"] = postgresql_server
-        postgresql_server["index_driver"] = "postgis"
-        config["experimental"] = postgresql_server
+        config["default"] = postgresql_server
+        config["default"]["index_driver"] = "default"
+        config["postgis"] = postgresql_server
+        config["postgis"]["index_driver"] = "postgis"
         with open(temp_datacube_config_file, "w", encoding="utf8") as fout:
             config.write(fout)

+        # Use pytest.MonkeyPatch instead of the monkeypatch fixture
+        # so that this fixture does not have to be function-scoped
+        mp = pytest.MonkeyPatch()
         # This environment variable points to the configuration file, and is used by the odc-tools CLI apps
         # as well as direct ODC API access, eg creating `Datacube()`
-        monkeypatch.setenv(
+        mp.setenv(
             "ODC_CONFIG_PATH",
             str(temp_datacube_config_file.absolute()),
         )
@@ -259,30 +264,36 @@ def odc_test_db(
         postgres_url = "postgresql://{db_username}:{db_password}@{db_hostname}:{db_port}/{db_database}".format(
             **postgresql_server
         )
-        # monkeypatch.setenv("ODC_DATACUBE_DB_URL", postgres_url)
+        new_db_database = request.module.__name__.replace(".", "_")
+        new_db_database = new_db_database.replace("-", "_")
         while True:
             try:
-                with psycopg2.connect(postgres_url):
-                    break
+                conn = psycopg2.connect(postgres_url)
+                conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
+
+                with conn.cursor() as cur:
+                    cur.execute(f"CREATE DATABASE {new_db_database};")
+                break
             except psycopg2.OperationalError:
                 print("Waiting for PostgreSQL to become available")
                 time.sleep(1)
-        return postgres_url
+        yield postgres_url
+        mp.undo()


-@pytest.fixture(scope="module", params=["datacube", "postgis"])
+@pytest.fixture(scope="module", params=["default", "postgis"])
 def env_name(request) -> str:
     return request.param


 @pytest.fixture(scope="module")
-def cfg_env(odc_db, env_name) -> ODCEnvironment:
+def cfg_env(odc_test_db, env_name) -> ODCEnvironment:
     """Provides a :class:`ODCEnvironment` configured with suitable config file paths."""
     return ODCConfig()[env_name]


 @pytest.fixture
-def odc_db(odc_test_db, cfg_env):
+def odc_db(cfg_env):
     """
     Provide a temporary PostgreSQL server initialised by ODC, usable as
     the default ODC DB by setting environment variables.
@@ -308,18 +319,20 @@ def odc_db(cfg_env):
     yield dc
     dc.close()

-    if index.name == "pg_index":
-        pgres_core.drop_db(index._db._engine)  # pylint:disable=protected-access
-        # We need to run this as well, I think because SQLAlchemy grabs them into it's MetaData,
-        # and attempts to recreate them. WTF TODO FIX
-        remove_postgres_dynamic_indexes()
-        # with psycopg2.connect(odc_test_db) as conn:
-        #     with conn.cursor() as cur:
-        #         cur.execute("DROP SCHEMA IF EXISTS agdc CASCADE;")
-    else:
-        pgis_core.drop_db(index._db._engine)  # pylint:disable=protected-access
-        remove_postgis_dynamic_indexes()
+    with index._db._engine.begin() as conn:  # pylint:disable=protected-access
+        if index.name == "pg_index":
+            pgres_core.drop_db(conn)
+            # We need to run this as well, I think because SQLAlchemy grabs them into its MetaData,
+            # and attempts to recreate them. WTF TODO FIX
+            remove_postgres_dynamic_indexes()
+            # with psycopg2.connect(odc_test_db) as conn:
+            #     with conn.cursor() as cur:
+            #         cur.execute("DROP SCHEMA IF EXISTS agdc CASCADE;")
+        else:
+            pgis_core.drop_db(conn)
+
+    remove_postgis_dynamic_indexes()


 def remove_postgres_dynamic_indexes():
@@ -363,7 +376,7 @@ def s2am_dsid():


 @pytest.fixture
-def odc_db_for_archive(odc_test_db_with_products: Datacube):
+def odc_db_for_archive(odc_test_db_with_products: Datacube, env_name):
     """Create a temporary test database with some pre-indexed datasets."""
     # pylint:disable=import-outside-toplevel
     from odc.apps.dc_tools.fs_to_dc import cli as fs_to_dc_cli
@@ -373,7 +386,8 @@ def odc_db_for_archive(odc_test_db_with_products: Datacube, env_name):
         "ga_s2am_ard_3-2-1_49JFM_2016-12-14_final.stac-item.json",
     ):
         result = CliRunner().invoke(
-            fs_to_dc_cli, ["--stac", "--glob", filename, str(TEST_DATA_FOLDER)]
+            fs_to_dc_cli,
+            ["--stac", "--glob", filename, str(TEST_DATA_FOLDER), "--env", env_name],
         )
         print(result.output)
         assert result.exit_code == 0
diff --git a/apps/dc_tools/tests/test_add_update_products.py b/apps/dc_tools/tests/test_add_update_products.py
index 023dcffed..6885945de 100644
--- a/apps/dc_tools/tests/test_add_update_products.py
+++ b/apps/dc_tools/tests/test_add_update_products.py
@@ -32,7 +32,7 @@ def test_load_product_def(remote_product):
     assert products[0]["name"] == "s2_l2a"


-def test_add_products(local_csv, odc_db):
+def test_add_products(local_csv, odc_db, env_name):
     runner = CliRunner()
     # This will fail if requester pays is enabled
     result = runner.invoke(
@@ -40,6 +40,8 @@ def test_add_products(local_csv, odc_db, env_name):
         [
             local_csv,
             "--update-if-exists",
+            "--env",
+            env_name,
         ],
     )
     print(f"CLI Output: {result.output}")
diff --git a/apps/dc_tools/tests/test_cop_dem_to_dc.py b/apps/dc_tools/tests/test_cop_dem_to_dc.py
index f33101f9d..d3f03549b 100644
--- a/apps/dc_tools/tests/test_cop_dem_to_dc.py
+++ b/apps/dc_tools/tests/test_cop_dem_to_dc.py
@@ -42,7 +42,7 @@ def test_complex_bbox(bbox_africa):

 # Test the actual process
 @pytest.mark.parametrize("product", PRODUCTS)
-def test_indexing_cli(bbox, product, odc_db):
+def test_indexing_cli(bbox, product, odc_db, env_name):
     runner = CliRunner()
     result = runner.invoke(
         cop_dem_to_dc_cli,
@@ -52,6 +52,8 @@ def test_indexing_cli(bbox, product, odc_db, env_name):
             bbox,
             "--product",
             product,
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
@@ -67,6 +69,8 @@ def test_indexing_cli(bbox, product, odc_db, env_name):
             bbox,
             "--product",
             product,
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
diff --git a/apps/dc_tools/tests/test_esa_worldcover_to_dc.py b/apps/dc_tools/tests/test_esa_worldcover_to_dc.py
index 4452dcb47..b42737707 100644
--- a/apps/dc_tools/tests/test_esa_worldcover_to_dc.py
+++ b/apps/dc_tools/tests/test_esa_worldcover_to_dc.py
@@ -53,7 +53,7 @@ def test_complex_bbox(bbox_africa):


 # Test the actual process
-def test_indexing_cli(bbox, odc_test_db_with_products):
+def test_indexing_cli(bbox, odc_test_db_with_products, env_name):
     runner = CliRunner()
     result = runner.invoke(
         cli,
@@ -62,6 +62,8 @@ def test_indexing_cli(bbox, odc_test_db_with_products, env_name):
             bbox,
             "--statsd-setting",
             "localhost:8125",
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
diff --git a/apps/dc_tools/tests/test_fs_to_dc.py b/apps/dc_tools/tests/test_fs_to_dc.py
index 49edadef9..86184bf40 100644
--- a/apps/dc_tools/tests/test_fs_to_dc.py
+++ b/apps/dc_tools/tests/test_fs_to_dc.py
@@ -6,7 +6,7 @@
 TEST_DATA_FOLDER: Path = Path(__file__).parent.joinpath("data")


-def test_fs_to_fc_yaml(test_data_dir, odc_test_db_with_products):
+def test_fs_to_fc_yaml(test_data_dir, env_name, odc_test_db_with_products):
     runner = CliRunner()
     result = runner.invoke(
         fs_to_dc_cli,
@@ -14,13 +14,15 @@ def test_fs_to_fc_yaml(test_data_dir, env_name, odc_test_db_with_products):
             test_data_dir,
             "--stac",
             "--glob=**/NASADEM_HGT_s56w072.stac-item.json",
+            "--env",
+            env_name,
         ],
         catch_exceptions=False,
     )
     assert result.exit_code == 0


-def test_archive_less_mature(odc_db, test_data_dir, nrt_dsid, final_dsid):
+def test_archive_less_mature(odc_db, env_name, test_data_dir, nrt_dsid, final_dsid):
     dc = odc_db
     runner = CliRunner()

@@ -31,6 +33,8 @@ def test_archive_less_mature(odc_db, env_name, test_data_dir, nrt_dsid, final_dsid):
             test_data_dir,
             "--glob=**/maturity-nrt.odc-metadata.yaml",
             "--archive-less-mature",
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
@@ -44,6 +48,8 @@
             test_data_dir,
             "--glob=**/maturity-final.odc-metadata.yaml",
             "--archive-less-mature",
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
@@ -51,7 +57,9 @@
     assert dc.index.datasets.get(nrt_dsid).archived_time is not None


-def test_dont_archive_less_mature(odc_db, test_data_dir, nrt_dsid, final_dsid):
+def test_dont_archive_less_mature(
+    odc_db, env_name, test_data_dir, nrt_dsid, final_dsid
+):
     # no archiving should be done if --archive-less-mature is not set
     dc = odc_db
     runner = CliRunner()
@@ -62,6 +70,8 @@
         [
             test_data_dir,
             "--glob=**/maturity-nrt.odc-metadata.yaml",
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
@@ -74,6 +84,8 @@
         [
             test_data_dir,
             "--glob=**/maturity-final.odc-metadata.yaml",
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
@@ -81,7 +93,7 @@
     assert dc.index.datasets.get(nrt_dsid).archived_time is None


-def test_keep_more_mature(odc_db, test_data_dir, nrt_dsid, final_dsid):
+def test_keep_more_mature(odc_db, env_name, test_data_dir, nrt_dsid, final_dsid):
     dc = odc_db
     runner = CliRunner()

@@ -92,6 +104,8 @@
             test_data_dir,
             "--glob=**/maturity-final.odc-metadata.yaml",
             "--archive-less-mature",
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
@@ -105,6 +119,8 @@
             test_data_dir,
             "--glob=**/maturity-nrt.odc-metadata.yaml",
             "--archive-less-mature",
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
diff --git a/apps/dc_tools/tests/test_s3_to_dc.py b/apps/dc_tools/tests/test_s3_to_dc.py
index 7c1a177f9..0f1116eb5 100644
--- a/apps/dc_tools/tests/test_s3_to_dc.py
+++ b/apps/dc_tools/tests/test_s3_to_dc.py
@@ -7,7 +7,7 @@


 def test_s3_to_dc_skips_already_indexed_datasets(
-    mocked_s3_datasets, odc_test_db_with_products
+    mocked_s3_datasets, odc_test_db_with_products, env_name
 ):
     runner = CliRunner()
     # This will fail if requester pays is enabled
@@ -18,6 +18,8 @@ def test_s3_to_dc_skips_already_indexed_datasets(
                 "--no-sign-request",
                 "s3://odc-tools-test/cemp_insar/**/*.yaml",
                 "cemp_insar_alos_displacement",
+                "--env",
+                env_name,
             ],
         )
         for _ in range(1, 3)
@@ -38,7 +40,9 @@
     )


-def test_s3_to_dc_stac(mocked_s3_datasets, aws_env, odc_test_db_with_products):
+def test_s3_to_dc_stac(
+    mocked_s3_datasets, aws_env, odc_test_db_with_products, env_name
+):
     result = CliRunner().invoke(
         s3_to_dc,
         [
@@ -46,6 +50,8 @@ def test_s3_to_dc_stac(
             "--stac",
             "s3://odc-tools-test/sentinel-s2-l2a-cogs/31/Q/GB/2020/8/S2B_31QGB_20200831_0_L2A/*_L2A.json",
             "s2_l2a",
+            "--env",
+            env_name,
         ],
         catch_exceptions=False,
     )
@@ -55,7 +61,9 @@
     )


-def test_s3_to_dc_stac_update_if_exist(mocked_s3_datasets, odc_test_db_with_products):
+def test_s3_to_dc_stac_update_if_exist(
+    mocked_s3_datasets, odc_test_db_with_products, env_name
+):
     result = CliRunner().invoke(
         s3_to_dc,
         [
@@ -64,6 +72,8 @@
             "--update-if-exists",
             "s3://odc-tools-test/sentinel-s2-l2a-cogs/31/Q/GB/2020/8/S2B_31QGB_20200831_0_L2A/*_L2A.json",
             "s2_l2a",
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
@@ -73,7 +83,7 @@

 def test_s3_to_dc_stac_update_if_exist_allow_unsafe(
-    mocked_s3_datasets, odc_test_db_with_products
+    mocked_s3_datasets, odc_test_db_with_products, env_name
 ):
     runner = CliRunner()
     result = runner.invoke(
@@ -85,6 +95,8 @@
             "--allow-unsafe",
             "s3://odc-tools-test/sentinel-s2-l2a-cogs/31/Q/GB/2020/8/S2B_31QGB_20200831_0_L2A/*_L2A.json",
             "s2_l2a",
+            "--env",
+            env_name,
         ],
     )
     print(f"s3-to-dc exit_code: {result.exit_code}, output:{result.output}")
@@ -95,7 +107,7 @@

 def test_s3_to_dc_fails_to_index_non_dataset_yaml(
-    mocked_s3_datasets, odc_test_db_with_products
+    mocked_s3_datasets, odc_test_db_with_products, env_name
 ):
     runner = CliRunner()
     result = runner.invoke(
@@ -104,6 +116,8 @@
             "--no-sign-request",
             "s3://dea-public-data/derivative/ga_ls5t_nbart_gm_cyear_3/3-0-0/x08/y23/1994--P1Y/ga_ls5t_nbart_gm_cyear_3_x08y23_1994--P1Y_final.proc-info.yaml",
             "ga_ls5t_nbart_gm_cyear_3",
+            "--env",
+            env_name,
         ],
         catch_exceptions=False,
     )
@@ -114,7 +128,7 @@ def test_s3_to_dc_fails_to_index_non_dataset_yaml(


 def test_s3_to_dc_partially_succeeds_when_given_invalid_and_valid_dataset_yamls(
-    mocked_s3_datasets, odc_test_db_with_products
+    mocked_s3_datasets, odc_test_db_with_products, env_name
 ):
     runner = CliRunner()
     result = runner.invoke(
@@ -125,6 +139,8 @@
             # This folder contains two yaml one valid dataset yaml and another non dataset yaml
             "s3://odc-tools-test/derivative/ga_ls5t_nbart_gm_cyear_3/3-0-0/x08/y23/1994--P1Y/*.yaml",
             "ga_ls5t_nbart_gm_cyear_3",
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 1
@@ -133,7 +149,9 @@
     )


-def test_s3_to_dc_list_absolute_urls(mocked_s3_datasets, odc_test_db_with_products):
+def test_s3_to_dc_list_absolute_urls(
+    mocked_s3_datasets, odc_test_db_with_products, env_name
+):
     # provide mulitple uris, as absolute URLs
     runner = CliRunner()
     result = runner.invoke(
@@ -144,6 +162,8 @@
             "s3://odc-tools-test/cemp_insar/04/01/alos_cumul_2010-04-01.yaml",
             "s3://odc-tools-test/cemp_insar/08/11/alos_cumul_2010-08-11.yaml",
             "cemp_insar_alos_displacement",
+            "--env",
+            env_name,
         ],
     )
     assert result.exit_code == 0
@@ -152,7 +172,7 @@
     )


-def test_s3_to_dc_no_product(mocked_s3_datasets, odc_test_db_with_products):
+def test_s3_to_dc_no_product(mocked_s3_datasets, odc_test_db_with_products, env_name):
     # product should not need to be specified
     runner = CliRunner()
     result = runner.invoke(
@@ -160,6 +180,8 @@
         [
             "--no-sign-request",
             "s3://odc-tools-test/cemp_insar/01/07/alos_cumul_2010-01-07.yaml",
+            "--env",
+            env_name,
         ],
         catch_exceptions=False,
     )
@@ -175,6 +197,8 @@
             "--no-sign-request",
             "--stac",
             "s3://odc-tools-test/sentinel-s2-l2a-cogs/31/Q/GB/2020/8/S2B_31QGB_20200831_0_L2A/*_L2A.json",
+            "--env",
+            env_name,
         ],
         catch_exceptions=False,
     )
diff --git a/apps/dc_tools/tests/test_sns_publishing.py b/apps/dc_tools/tests/test_sns_publishing.py
index b456925ad..f8ed9b74f 100644
--- a/apps/dc_tools/tests/test_sns_publishing.py
+++ b/apps/dc_tools/tests/test_sns_publishing.py
@@ -71,7 +71,7 @@ def sns_setup(aws_credentials, aws_env):


 def test_s3_publishing_action_from_stac(
-    mocked_s3_datasets, odc_test_db_with_products, s2am_dsid, sns_setup
+    mocked_s3_datasets, odc_test_db_with_products, env_name, s2am_dsid, sns_setup
 ):
     _, _, output_topic_arn, sqs, _, output_queue_url = sns_setup
@@ -90,6 +90,8 @@
             f"--publish-action={output_topic_arn}",
             "s3://odc-tools-test/baseline/ga_s2am_ard_3/49/JFM/2016/12/14/20161214T092514/*stac-item.json",
             "ga_s2am_ard_3",
+            "--env",
+            env_name,
         ],
         catch_exceptions=False,
     )
@@ -109,7 +111,7 @@

 def test_s3_publishing_action_from_eo3(
-    mocked_s3_datasets, odc_test_db_with_products, s2am_dsid, sns_setup
+    mocked_s3_datasets, odc_test_db_with_products, env_name, s2am_dsid, sns_setup
 ):
     """Same as above but requiring stac to eo3 conversion"""
     _, _, output_topic_arn, sqs, _, output_queue_url = sns_setup
@@ -127,6 +129,8 @@
             f"--publish-action={output_topic_arn}",
"s3://odc-tools-test/baseline/ga_s2am_ard_3/49/JFM/2016/12/14/20161214T092514/*odc-metadata.yaml", "ga_s2am_ard_3", + "--env", + env_name, ], catch_exceptions=False, ) @@ -156,7 +160,7 @@ def stac_doc(): def test_sqs_publishing( - aws_credentials, aws_env, stac_doc, odc_test_db_with_products, sns_setup + aws_credentials, aws_env, stac_doc, odc_test_db_with_products, env_name, sns_setup ): """Test that actions are published with sqs_to_dc""" ( @@ -195,6 +199,8 @@ def test_sqs_publishing( "--update-if-exists", "--stac", f"--publish-action={output_topic_arn}", + "--env", + env_name, ], catch_exceptions=False, ) @@ -213,7 +219,13 @@ def test_sqs_publishing( def test_sqs_publishing_archive_flag( - aws_credentials, aws_env, stac_doc, odc_db_for_archive, ls5t_dsid, sns_setup + aws_credentials, + aws_env, + stac_doc, + odc_db_for_archive, + env_name, + ls5t_dsid, + sns_setup, ): """Test that an ARCHIVE SNS message is published when the --archive flag is used.""" ( @@ -246,6 +258,8 @@ def test_sqs_publishing_archive_flag( "--stac", "--archive", f"--publish-action={output_topic_arn}", + "--env", + env_name, ], catch_exceptions=False, ) @@ -263,7 +277,13 @@ def test_sqs_publishing_archive_flag( def test_sqs_publishing_archive_attribute( - aws_credentials, aws_env, stac_doc, odc_db_for_archive, ls5t_dsid, sns_setup + aws_credentials, + aws_env, + stac_doc, + odc_db_for_archive, + env_name, + ls5t_dsid, + sns_setup, ): """Test that archiving occurs when ARCHIVED is in the message attributes""" ( @@ -297,6 +317,8 @@ def test_sqs_publishing_archive_attribute( "--update-if-exists", "--stac", f"--publish-action={output_topic_arn}", + "--env", + env_name, ], catch_exceptions=False, ) @@ -316,6 +338,7 @@ def test_with_archive_less_mature( aws_credentials, aws_env, odc_db, + env_name, nrt_dsid, final_dsid, sns_setup, @@ -333,6 +356,8 @@ def test_with_archive_less_mature( "--glob=**/maturity-nrt.odc-metadata.yaml", "--archive-less-mature", f"--publish-action={output_topic_arn}", + "--env", + env_name, ], catch_exceptions=False, ) @@ -360,6 +385,8 @@ def test_with_archive_less_mature( "--glob=**/maturity-final.odc-metadata.yaml", "--archive-less-mature", f"--publish-action={output_topic_arn}", + "--env", + env_name, ], catch_exceptions=False, ) diff --git a/apps/dc_tools/tests/test_sqs_to_dc.py b/apps/dc_tools/tests/test_sqs_to_dc.py index 13ca3a7f5..a320249c6 100644 --- a/apps/dc_tools/tests/test_sqs_to_dc.py +++ b/apps/dc_tools/tests/test_sqs_to_dc.py @@ -98,12 +98,14 @@ def aws_credentials(): os.environ["AWS_SESSION_TOKEN"] = "testing" -def test_extract_metadata_from_message(aws_credentials, odc_test_db_with_products): +def test_extract_metadata_from_message( + aws_credentials, odc_test_db_with_products, cfg_env +): with mock_aws(): TEST_QUEUE_NAME = "a_test_queue" sqs_resource = boto3.resource("sqs") - dc = Datacube() + dc = Datacube(env=cfg_env) a_queue = sqs_resource.create_queue(QueueName=TEST_QUEUE_NAME) assert int(a_queue.attributes.get("ApproximateNumberOfMessages")) == 0 diff --git a/docker/Dockerfile b/docker/Dockerfile index 38ddedf12..643beda9b 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,6 +1,6 @@ #syntax=docker/dockerfile:1.2 ARG V_PG=16 -ARG V_PGIS=16-postgis-3.4 +ARG V_PGIS=16-postgis-3 FROM osgeo/gdal:ubuntu-small-3.5.3 ENV LC_ALL=C.UTF-8 diff --git a/docker/constraints.txt b/docker/constraints.txt index 7e74a79b6..48be46b6e 100644 --- a/docker/constraints.txt +++ b/docker/constraints.txt @@ -1,235 +1,183 @@ # -# This file is autogenerated by pip-compile with Python 3.8 +# 
This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --no-annotate --no-build-isolation --output-file=constraints.txt --strip-extras requirements.in # -affine==2.3.0 -aiobotocore==2.4.0 -aiohttp==3.8.1 -aioitertools==0.9.0 -aiosignal==1.2.0 -alabaster==0.7.12 -argon2-cffi==21.3.0 -argon2-cffi-bindings==21.2.0 -astroid==2.9.3 -asttokens==2.0.5 -async-timeout==4.0.2 -attrs==21.4.0 -autodocsumm==0.2.11 -awscli==1.25.60 -azure-core==1.22.1 -azure-storage-blob==12.9.0 -babel==2.9.1 -backcall==0.2.0 -beautifulsoup4==4.12.2 -black==22.1.0 -bleach==4.1.0 -bokeh==2.4.2 -boltons==21.0.0 -boto3==1.24.59 -botocore==1.27.59 -bottleneck==1.3.5 -build==0.8.0 -cachetools==5.0.0 -cattrs==22.2.0 -certifi==2021.10.8 -cffi==1.15.0 -cftime==1.5.2 -charset-normalizer==2.0.12 -ciso8601==2.2.0 -click==8.0.3 +affine==2.4.0 +aiobotocore==2.16.0 +aiohappyeyeballs==2.4.4 +aiohttp==3.11.11 +aioitertools==0.12.0 +aiosignal==1.3.2 +alembic==1.14.0 +antimeridian==0.4.0 +asttokens==3.0.0 +async-timeout==5.0.1 +attrs==24.3.0 +awscli==1.36.22 +azure-core==1.32.0 +azure-storage-blob==12.24.0 +blinker==1.9.0 +bokeh==3.6.2 +boltons==24.1.0 +boto3==1.35.81 +botocore==1.35.81 +bottleneck==1.4.2 +branca==0.8.1 +build==1.2.2.post1 +cachetools==5.5.0 +cattrs==24.1.2 +certifi==2024.12.14 +cffi==1.17.1 +charset-normalizer==3.4.0 +ciso8601==2.3.2 +click==8.1.8 click-plugins==1.1.1 cligj==0.7.2 -cloudpickle==2.0.0 -colorama==0.4.3 -commonmark==0.9.1 -coverage==6.3.1 -cryptography==36.0.1 -dask==2022.2.0 -dask-image==2021.12.0 -datacube==1.8.15 -datadog==0.44.0 -debugpy==1.5.1 +cloudpickle==3.1.0 +colorama==0.4.6 +comm==0.2.2 +contourpy==1.3.1 +cryptography==44.0.0 +dask==2024.10.0 +dask-expr==1.1.16 +dask-image==2024.5.3 +datacube==1.9.0 +datadog==0.50.2 decorator==5.1.1 -deepdiff==5.7.0 +deepdiff==8.1.1 defusedxml==0.7.1 -deprecat==2.1.1 -distributed==2022.02.0 -docker==5.0.3 -docutils==0.15.2 -entrypoints==0.4 -eodatasets3==0.29.5 -exceptiongroup==1.0.1 -executing==0.8.2 -flask==2.2.2 -flask-cors==3.0.10 -frozenlist==1.3.0 -fsspec==2022.1.0 -geoalchemy2==0.12.5 -google-api-core==2.5.0 -google-auth==2.6.0 -google-cloud-core==2.2.2 -google-cloud-storage==2.1.0 -google-crc32c==1.3.0 -google-resumable-media==2.2.1 -googleapis-common-protos==1.54.0 -greenlet==1.1.2 +deprecat==2.1.3 +distributed==2024.10.0 +docker==7.1.0 +docutils==0.16 +eodatasets3==1.9.0 +exceptiongroup==1.2.2 +executing==2.1.0 +fiona==1.10.1 +flask==3.1.0 +flask-cors==5.0.0 +frozenlist==1.5.0 +fsspec==2024.12.0 +geoalchemy2==0.16.0 +google-api-core==2.24.0 +google-auth==2.37.0 +google-cloud-core==2.4.1 +google-cloud-storage==2.19.0 +google-crc32c==1.6.0 +google-resumable-media==2.7.2 +googleapis-common-protos==1.66.0 +greenlet==3.1.1 +h5py==3.12.1 hdstats==0.2.1 -heapdict==1.0.1 -hypothesis==6.36.2 -idna==3.3 -imageio==2.16.0 -imagesize==1.3.0 -importlib-metadata==5.0.0 -importlib-resources==6.0.0 -iniconfig==1.1.1 -ipykernel==6.9.1 -ipyleaflet==0.15.0 -ipython==8.0.1 -ipython-genutils==0.2.0 -ipywidgets==7.6.5 -isodate==0.6.1 -isort==5.10.1 -itsdangerous==2.1.2 -jedi==0.18.1 -jinja2==3.0.3 -jmespath==0.10.0 -jsonschema==3.2.0 -jupyter-client==7.1.2 -jupyter-core==4.9.2 -jupyter-ui-poll==0.2.1 -jupyterlab-pygments==0.1.2 -jupyterlab-widgets==1.0.2 -lark==1.1.2 -lazy-object-proxy==1.7.1 -locket==0.2.1 -lxml==4.7.1 -markupsafe==2.1.1 -matplotlib-inline==0.1.3 -mccabe==0.6.1 -mistune==0.8.4 -moto==4.0.9 -msgpack==1.0.3 -msrest==0.6.21 -multidict==6.0.2 -mypy-extensions==0.4.3 -nbclient==0.5.11 -nbconvert==6.4.2 
-nbformat==5.1.3
-nbsphinx==0.9.2
-nest-asyncio==1.5.4
-netcdf4==1.5.8
-networkx==2.6.3
-notebook==6.4.8
-numexpr==2.8.1
-numpy==1.23.3
-oauthlib==3.2.0
-ordered-set==4.0.2
-packaging==21.3
-pandas==1.4.1
-pandocfilters==1.5.0
-parso==0.8.3
-partd==1.2.0
-pathspec==0.9.0
-pep517==0.12.0
-pexpect==4.8.0
-pickleshare==0.7.5
-pillow==9.0.1
-pims==0.5
-pip-tools==6.9.0
-platformdirs==2.5.0
-pluggy==1.0.0
-prometheus-client==0.13.1
-prompt-toolkit==3.0.28
-protobuf==3.19.4
-psutil==5.9.0
-psycopg2==2.9.3
+idna==3.10
+imageio==2.36.1
+importlib-metadata==8.5.0
+iniconfig==2.0.0
+ipyleaflet==0.19.2
+ipython==8.31.0
+ipywidgets==8.1.5
+isodate==0.7.2
+itsdangerous==2.2.0
+jedi==0.19.2
+jinja2==3.1.5
+jmespath==1.0.1
+jsonschema==4.23.0
+jsonschema-specifications==2024.10.1
+jupyter-leaflet==0.19.2
+jupyter-ui-poll==1.0.0
+jupyterlab-widgets==3.0.13
+lark==1.2.2
+lazy-loader==0.4
+locket==1.0.0
+lxml==5.3.0
+lz4==4.3.3
+mako==1.3.8
+markupsafe==3.0.2
+matplotlib-inline==0.1.7
+moto==5.0.24
+msgpack==1.1.0
+multidict==6.1.0
+networkx==3.4.2
+numexpr==2.10.2
+numpy==1.26.4
+odc-geo==0.4.8
+orderly-set==5.2.3
+packaging==24.2
+pandas==2.2.3
+parso==0.8.4
+partd==1.4.2
+pexpect==4.9.0
+pillow==11.0.0
+pims==0.7
+pip-tools==7.4.1
+pluggy==1.5.0
+prompt-toolkit==3.0.48
+propcache==0.2.1
+proto-plus==1.25.0
+protobuf==5.29.2
+psutil==6.1.1
 ptyprocess==0.7.0
-pure-eval==0.2.2
-py==1.11.0
-pyasn1==0.4.8
-pyasn1-modules==0.2.8
-pycodestyle==2.8.0
-pycparser==2.21
-pydata-sphinx-theme==0.9.0
-pygments==2.11.2
-pylint==2.12.2
-pyparsing==3.0.7
-pyproj==3.1.0
-pyrsistent==0.18.1
-pystac==1.2.0
-pystac-client==0.3.2
-pytest==7.0.1
-pytest-cov==3.0.0
-pytest-httpserver==1.0.4
-pytest-timeout==2.1.0
-python-dateutil==2.8.2
-python-rapidjson==1.9
-pytz==2021.3
-pywavelets==1.2.0
-pyyaml==5.4.1
-pyzmq==22.3.0
-rasterio==1.3.2
-recommonmark==0.7.1
-requests==2.27.1
-requests-oauthlib==1.3.1
-responses==0.18.0
-rio-stac==0.3.2
+pure-eval==0.2.3
+pyarrow==18.1.0
+pyasn1==0.6.1
+pyasn1-modules==0.4.1
+pycparser==2.22
+pygments==2.18.0
+pyparsing==3.2.0
+pyproj==3.7.0
+pyproject-hooks==1.2.0
+pystac==1.11.0
+pystac-client==0.8.5
+pytest==8.3.4
+python-dateutil==2.9.0.post0
+python-rapidjson==1.20
+pytz==2024.2
+pyyaml==6.0.2
+rasterio==1.4.3
+referencing==0.35.1
+requests==2.32.3
+responses==0.25.3
+rio-stac==0.10.1
+rpds-py==0.22.3
 rsa==4.7.2
-ruamel-yaml==0.17.21
-ruamel-yaml-clib==0.2.7
-s3transfer==0.6.0
-scikit-image==0.19.1
-scipy==1.8.0
-send2trash==1.8.0
-shapely==2.0.1
-six==1.16.0
-slicerator==1.0.0
-snowballstemmer==2.2.0
-snuggs==1.4.7
+ruamel-yaml==0.18.6
+ruamel-yaml-clib==0.2.12
+s3transfer==0.10.4
+scikit-image==0.25.0
+scipy==1.14.1
+shapely==2.0.6
+six==1.17.0
+slicerator==1.1.0
 sortedcontainers==2.4.0
-soupsieve==2.4.1
-sphinx==4.4.0
-sphinx-autodoc-typehints==1.17.0
-sphinx-click==3.1.0
-sphinxcontrib-applehelp==1.0.2
-sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-htmlhelp==2.0.0
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==1.0.3
-sphinxcontrib-serializinghtml==1.1.5
-sqlalchemy==1.4.31
-stack-data==0.2.0
-structlog==22.1.0
-tblib==1.7.0
-terminado==0.13.1
-testpath==0.5.0
+sqlalchemy==2.0.36
+stack-data==0.6.3
+structlog==24.4.0
+tblib==3.0.0
 thredds-crawler==1.5.4
-tifffile==2022.2.9
-toml==0.10.2
-tomli==2.0.1
-toolz==0.11.2
-tornado==6.1
-tqdm==4.62.3
-traitlets==5.1.1
+tifffile==2024.12.12
+tomli==2.2.1
+toolz==1.0.0
+tornado==6.4.2
+tqdm==4.67.1
+traitlets==5.14.3
 traittypes==0.2.1
-typing-extensions==4.1.1
-urllib3==1.26.8
+typing-extensions==4.12.2
+tzdata==2024.2
+urllib3==2.3.0
 urlpath==1.2.0
-wcwidth==0.2.5
-webencodings==0.5.1
-websocket-client==1.2.3
-werkzeug==2.2.2
-wheel==0.37.1
-widgetsnbextension==3.5.2
-wrapt==1.13.3
-xarray==2022.3.0
-xmltodict==0.12.0
-xyzservices==2022.2.0
-yarl==1.7.2
-zict==2.0.0
-zipp==3.10.0
+wcwidth==0.2.13
+werkzeug==3.1.3
+wheel==0.45.1
+widgetsnbextension==4.0.13
+wrapt==1.17.0
+xarray==2024.11.0
+xmltodict==0.14.2
+xyzservices==2024.9.0
+yarl==1.18.3
+zict==3.0.0
+zipp==3.21.0

 # The following packages are considered to be unsafe in a requirements file:
 # pip
diff --git a/docker/requirements.in b/docker/requirements.in
index 5fd76c819..eaaffc0e6 100644
--- a/docker/requirements.in
+++ b/docker/requirements.in
@@ -21,15 +21,15 @@ bottleneck>=1.3.5
 click

 # Make sure dask has all the features enabled
-dask[complete]
-dask_image
+dask[complete]>=2023.2.0,<2024.11.0
+dask_image>=2023.2.0,<2024.11.0

-datacube[dev]>=1.9.0-rc9
+datacube[dev]>=1.9.0
 datadog

 # things needed for tests that might not be referenced in setup.py
 deepdiff
 docker
-# eodatasets3
+eodatasets3>=1.9.0
 fsspec
 google-cloud-storage
 hdstats>=0.1.7.post5
@@ -38,9 +38,9 @@ ipywidgets
 jinja2
 jupyter_ui_poll
 numexpr
-numpy>=1.23.0
+numpy>=1.24.0,<2
 pip-tools
-pyproj
+pyproj>=3.5
 pystac>=1.0.0
 pystac-client>=0.2.0
 pytest
@@ -53,8 +53,7 @@ thredds_crawler
 toolz
 tqdm
 urlpath
-xarray==2022.3.0
-importlib_resources>=6.0
+xarray>=2023.9.0


 # Moto and Flask
diff --git a/libs/cloud/tests/test_azure.py b/libs/cloud/tests/test_azure.py
index f0acbc810..6ef55546c 100644
--- a/libs/cloud/tests/test_azure.py
+++ b/libs/cloud/tests/test_azure.py
@@ -1,9 +1,11 @@
 """Test thredds downloader code
 """
+import pytest
 from odc.azure import download_yamls, find_blobs


+@pytest.mark.xfail(reason="Key based authentication is not permitted on this account")
 def test_find_blobs():
     """Find blobs in a sample Azure account, this will fail if the blob store
     changes or is removed"""
@@ -21,6 +23,7 @@ def test_find_blobs():
     assert len(blob_names) == 1


+@pytest.mark.xfail(reason="Key based authentication is not permitted on this account")
 def test_download_yamls():
     """Test pass/fail arms of YAML download from Azure blobstore"""
diff --git a/tests/test-env.yml b/tests/test-env.yml
index d51c2b979..ab144d119 100644
--- a/tests/test-env.yml
+++ b/tests/test-env.yml
@@ -10,7 +10,7 @@ dependencies:
   - python=3.10

   # Datacube
-  # - datacube>=1.8.19
+  # - datacube[postgres]>=1.9.0
   - sqlalchemy>=2.0

   # odc.ui
@@ -26,8 +26,7 @@ dependencies:
   - lxml  # needed for thredds-crawler
   - urlpath
   - datadog
-  - eodatasets3
-  - importlib_resources>=6.0
+  # - eodatasets3>=1.9.0
   - odc-geo

   # odc.{aws,aio}: aiobotocore/boto3
@@ -51,12 +50,13 @@ dependencies:
   - pip=24
   - pip:
-      - datacube>=1.9.0-rc9
+      - datacube[postgres]>=1.9.0
       # odc.apps.dc-tools
       - thredds-crawler
       - rio-stac
       # - psycopg2-binary
-      # - eodatasets3 @ git+https://github.com/opendatacube/eo-datasets@integrate-1.9
+      - eodatasets3>=1.9.0
+      - docker

       # odc.ui
       - jupyter-ui-poll>=0.2.0a
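
Note for reviewers: every dc-tools CLI in this patch gains the same three pieces — the two datacube 1.9 imports, the @environment_option/@pass_config decorator pair, and Datacube(env=cfg_env). The sketch below shows how those pieces fit together in one place. It is illustrative only: it assumes nothing beyond the imports already used above, and the "example-to-dc" command name is made up.

import click
from datacube import Datacube
from datacube.cfg import ODCEnvironment
from datacube.ui.click import environment_option, pass_config


@click.command("example-to-dc")  # hypothetical command, for illustration only
@environment_option              # adds the --env option exercised by the tests above
@pass_config                     # resolves --env to an ODCEnvironment, passed as the first argument
@click.argument("product", type=str, nargs=1)
def cli(cfg_env: ODCEnvironment, product: str):
    # The named environment (e.g. "default" or "postgis" in the two-section
    # config file that conftest.py writes and points ODC_CONFIG_PATH at)
    # selects the index driver and database connection details.
    dc = Datacube(env=cfg_env)
    click.echo(f"Indexing {product} via index driver {dc.index.name!r}")


if __name__ == "__main__":
    cli()

Invoked as "example-to-dc --env postgis some_product", the command connects through the postgis index driver; omitting --env should fall back to the default environment, which is why the tests parametrise env_name over both sections.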