
package org.opensearch.flint.spark

+import scala.collection.JavaConverters._
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.Future
+
+import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest
import org.opensearch.client.RequestOptions
import org.opensearch.client.indices.CreateIndexRequest
import org.opensearch.common.xcontent.XContentType
import org.opensearch.flint.spark.FlintSparkIndexOptions.OptionName.AUTO_REFRESH
import org.opensearch.flint.spark.covering.FlintSparkCoveringIndex
import org.opensearch.flint.spark.mv.FlintSparkMaterializedView
import org.opensearch.flint.spark.skipping.FlintSparkSkippingIndex
+import org.scalatest.matchers.should.Matchers

import org.apache.spark.sql.Row
+import org.apache.spark.sql.functions.col

-class FlintSparkIndexSqlITSuite extends FlintSparkSuite {
+class FlintSparkIndexSqlITSuite extends FlintSparkSuite with Matchers {

  private val testTableName = "index_test"
  private val testTableQualifiedName = s"spark_catalog.default.$testTableName"
@@ -99,6 +106,42 @@ class FlintSparkIndexSqlITSuite extends FlintSparkSuite {
      FlintSparkMaterializedView.getFlintIndexName("spark_catalog.other.mv2"))
  }

+  test("show flint indexes with extended information") {
+    // Create an auto-refresh skipping index and refresh it with all existing data
+    flint
+      .skippingIndex()
+      .onTable(testTableQualifiedName)
+      .addValueSet("name")
+      .options(FlintSparkIndexOptions(Map(AUTO_REFRESH.toString -> "true")))
+      .create()
+    flint.refreshIndex(testSkippingFlintIndex)
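+    // Wait for the initial micro batch of the streaming job to complete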
+    val activeJob = spark.streams.active.find(_.name == testSkippingFlintIndex)
+    awaitStreamingComplete(activeJob.get.id.toString)
+
+    // After 5 seconds, make the index read-only, then trigger the next micro batch
+    Future {
+      Thread.sleep(5000)
+      openSearchClient
+        .indices()
+        .putSettings(
+          new UpdateSettingsRequest(testSkippingFlintIndex).settings(
+            Map("index.blocks.write" -> true).asJava),
+          RequestOptions.DEFAULT)
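+      // Inserting a new row triggers the next micro batch, whose write to the now read-only index fails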
+      sql(
+        s"INSERT INTO $testTableQualifiedName VALUES (TIMESTAMP '2023-10-01 04:00:00', 'F', 25, 'Vancouver')")
+    }
+
+    // Await the monitor, which is expected to catch the failure and store the exception
+    flint.flintIndexMonitor.awaitMonitor(Some(testSkippingFlintIndex))
+
+    // Assert that the extended output contains the error message
+    val df = sql("SHOW FLINT INDEX EXTENDED IN spark_catalog")
+    df.columns should contain("error")
+    df.select(col("error")).collect().head.getString(0) should include("OpenSearchException")
+
+    deleteTestIndex(testSkippingFlintIndex)
+  }
+
  test("should return empty when show flint index in empty database") {
    checkAnswer(sql(s"SHOW FLINT INDEX IN spark_catalog.default"), Seq.empty)
  }